new subtitle renderer, experimental

This commit is contained in:
ThaUnknown 2020-10-16 01:20:43 +02:00
parent 5d3a60c109
commit 3f4e6bd6c3
11 changed files with 11001 additions and 27 deletions

View file

@ -45,7 +45,9 @@
right: 2rem;
width: 25%;
transition: width .2s ease;
z-index: 10
z-index: 10;
flex-direction: column;
justify-content: center;
}
#player>a {
@ -116,6 +118,7 @@
#video {
width: 100%;
height: 100%
}
.volume {

View file

@ -135,7 +135,7 @@
</div>
</div>
<div class="overflow-y-hidden content-wrapper">
<section id="player">
<section id="player" class="overflow-hidden">
<video id="video">
</video>
<a href="#player" class="w-full h-full"></a>
@ -354,7 +354,8 @@
<script src="js/rangeParser.js"></script>
<script src="js/torrentHandler.js"></script>
<script src="https://cdn.jsdelivr.net/npm/matroska-subtitles@3.1.0/dist/matroska-subtitles.min.js"></script>
<script src="js/subtitleHandler.js"></script>
<script src="js/subtitles-octopus.js"></script>
<script src="js/subtitleOctopus.js"></script>
</body>
</html>

View file

@ -23,7 +23,9 @@ function resetVideo() {
video.remove()
nowPlayingDisplay.textContent = ""
tracks = []
headers = []
subtitles = []
headers = undefined
subtitleStream = undefined
dl.removeAttribute("href")
dl.removeAttribute("download")
video = document.createElement("video")
@ -338,7 +340,7 @@ function selectLang(lang) {
for (let track of video.textTracks) {
if (track.language == lang) {
track.mode = 'showing';
displayHeaderr(headers[tracks.indexOf(track)])
displayHeader(headers[tracks.indexOf(track)])
}
else {
track.mode = 'hidden';

View file

@ -9,22 +9,23 @@ function parseSubs(stream) {
subtitleStream = new SubtitleStream(subtitleStream)
} else {
subtitleStream = new SubtitleStream()
subtitleStream.once('tracks', pTracks => {
pTracks.forEach(track => {
tracks[track.number] = video.addTextTrack('captions', track.type, track.language);
parseHeader(track.header, track.number);
let spacerCue = new VTTCue(0.1, 9999, "&nbsp;")
spacerCue.line = -1
tracks[track.number].addCue(spacerCue)
})
if (video.textTracks[0]) {
video.textTracks[0].mode = "showing"
displayHeader(headers[3])
}
})
// subtitleStream.once('tracks', pTracks => {
// pTracks.forEach(track => {
// tracks[track.number] = video.addTextTrack('captions', track.type, track.language);
// parseHeader(track.header, track.number);
// let spacerCue = new VTTCue(0.1, 9999, "&nbsp;")
// spacerCue.line = -1
// tracks[track.number].addCue(spacerCue)
// })
// if (video.textTracks[0]) {
// video.textTracks[0].mode = "showing"
// displayHeader(headers[3])
// }
// })
}
subtitleStream.on('subtitle', function (subtitle, trackNumber) {
subConvt(subtitle, trackNumber)
// subConvt(subtitle, trackNumber)
console.log(subtitle)
})
stream.pipe(subtitleStream)
}
@ -148,7 +149,6 @@ function subConvt(result, trackNumber) {
while (tagsToClose.length > 0) {
content += '</' + tagsToClose.pop() + '>';
}
console.log(result.style)
if (!positioned && headers[trackNumber].styles[result.style][headers[trackNumber].format.indexOf("Alignment")]) {
let posNum = Number(headers[trackNumber].styles[result.style][headers[trackNumber].format.indexOf("Alignment")]);
if (Math.floor((posNum - 1) / 3) == 1) {
@ -188,10 +188,10 @@ function displayHeader(header) {
substyles.innerHTML = ""
for (let style of Object.values(header.styles)) {
let bordCol
style[header.format.indexOf("BackColour")] ? bordCol = style[header.format.indexOf("BackColour")].split("").reverse().join("").slice(0, -2) : "#000"
style[header.format.indexOf("OutlineColour")] ? bordCol = style[header.format.indexOf("OutlineColour")].match(/[\s\S]{1,2}/g).reverse().join("").slice(0, -2) : "#000"
substyles.innerHTML += `
video::cue(.${style[header.format.indexOf("Name")]}) {
color: #${style[header.format.indexOf("PrimaryColour")] ? style[header.format.indexOf("PrimaryColour")].split("").reverse().join("").slice(0, -2) : ""} !important;
color: #${style[header.format.indexOf("PrimaryColour")] ? style[header.format.indexOf("PrimaryColour")].match(/[\s\S]{1,2}/g).reverse().join("").slice(0, -2) : ""} !important;
text-shadow: 2px 2px 0 #${bordCol},
2px -2px 0 #${bordCol},
-2px 2px 0 #${bordCol},
@ -201,9 +201,11 @@ video::cue(.${style[header.format.indexOf("Name")]}) {
-2px 0px 0 #${bordCol},
0px -2px 0 #${bordCol},
2px 2px 2px #${bordCol};
font-weight: ${style[header.format.indexOf("Bold")] ? style[header.format.indexOf("Bold")] * -1 ? "bold" : "normal" : ""} !important;
font-weight: ${style[header.format.indexOf("Bold")] ? style[header.format.indexOf("Bold")] * -1 ? "400" : "300" : ""} !important;
font-style: ${style[header.format.indexOf("Italic")] ? style[header.format.indexOf("Italic")] * -1 ? "italic" : "normal" : ""} !important;
background: ${style[header.format.indexOf("BorderStyle")] ? style[header.format.indexOf("BorderStyle")] != 3 ? "none" : `#${bordCol}` : ""} !important;
}`
}
}
}
// font-weight: ${style[header.format.indexOf("Bold")] ? style[header.format.indexOf("Bold")] * -1 ? "bold" : "normal" : ""} !important;

63
app/js/subtitleOctopus.js Normal file
View file

@ -0,0 +1,63 @@
// Shared subtitle-rendering state for the currently playing video.
let tracks = [],        // TextTrack objects indexed by matroska track number (non-ASS tracks rendered as native VTT cues)
    headers,            // per-track ASS header text ([Script Info] + styles), indexed by track number; undefined until an ASS track is seen
    subtitles = [],     // per-track arrays of reconstructed "Dialogue: ..." lines, indexed by track number (ASS tracks only)
    subtitleStream,     // matroska-subtitles demuxer for the current torrent stream
    octopusInstance     // lazily created SubtitlesOctopus renderer (created on first ASS dialogue)
// SubtitleStream comes from the matroska-subtitles UMD bundle loaded via <script>.
const { SubtitleStream } = MatroskaSubtitles
// Demux subtitle tracks out of an .mkv stream and route them to a renderer:
// ASS tracks accumulate raw "Dialogue:" lines for SubtitlesOctopus, while any
// other text track becomes native VTT cues on the <video> element.
function subStream(stream) {
    if (video.src.endsWith(".mkv")) {
        if (subtitleStream) {
            // Re-seek: resume demuxing with previously discovered track metadata.
            subtitleStream = new SubtitleStream(subtitleStream)
        } else {
            subtitleStream = new SubtitleStream()
            subtitleStream.once('tracks', pTracks => {
                pTracks.forEach(track => {
                    if (track.type == "ass") {
                        if (!headers) {
                            headers = []
                        }
                        headers[track.number] = track.header
                        subtitles[track.number] = []
                    } else {
                        tracks[track.number] = video.addTextTrack('captions', track.type, track.language);
                        // Invisible long-lived cue keeps the track active so real cues
                        // display as soon as they are added.
                        let spacerCue = new VTTCue(0.1, 9999, "&nbsp;")
                        spacerCue.line = -1
                        tracks[track.number].addCue(spacerCue)
                    }
                })
                if (video.textTracks[0]) {
                    video.textTracks[0].mode = "showing"
                }
            })
        }
        subtitleStream.on('subtitle', function (subtitle, trackNumber) {
            // Rebuild the original ASS event line; times formatted H:MM:SS.cc (centiseconds).
            let formatSub = "Dialogue: " + subtitle.layer + "," + new Date(subtitle.time).toISOString().slice(12, -1).slice(0,-1) + "," + new Date(subtitle.time + subtitle.duration).toISOString().slice(12, -1).slice(0,-1) + "," + subtitle.style + "," + subtitle.name + "," + subtitle.marginL + "," + subtitle.marginR + "," + subtitle.marginV + "," + subtitle.effect + "," + subtitle.text
            // BUG FIX: previously this branched on the global `headers`, so once any
            // ASS track existed, events from non-ASS tracks in the same file took the
            // ASS path and crashed on the uninitialised `subtitles[trackNumber]`.
            // Route per track instead: only ASS tracks have a subtitles[] entry.
            if (subtitles[trackNumber]) {
                if (!subtitles[trackNumber].includes(formatSub)) {
                    subtitles[trackNumber].push(formatSub)
                    // BUG FIX: was renderSubs(3) — a hard-coded test track number that
                    // reads headers[3] even when track 3 doesn't exist; render the
                    // track this event actually belongs to.
                    renderSubs(trackNumber)
                }
            } else {
                // De-dupe: seeking replays packets whose cues we may already hold.
                if (!Object.values(tracks[trackNumber].cues).some(c => c.text == subtitle.text && c.startTime == subtitle.time / 1000 && c.endTime == (subtitle.time + subtitle.duration) / 1000)) {
                    let cue = new VTTCue(subtitle.time / 1000, (subtitle.time + subtitle.duration) / 1000, subtitle.text)
                    tracks[trackNumber].addCue(cue)
                }
            }
        })
        stream.pipe(subtitleStream)
    }
}
// Feed the accumulated ASS content for one track into SubtitlesOctopus,
// creating the renderer on first use and swapping the track thereafter.
function renderSubs(trackNumber) {
    // Drop the header's trailing character, then append all dialogue lines.
    const assContent = headers[trackNumber].slice(0, -1) + subtitles[trackNumber].join("\n")
    if (octopusInstance) {
        octopusInstance.setTrack(assContent)
    } else {
        octopusInstance = new SubtitlesOctopus({
            video: video,
            subContent: assContent,
            workerUrl: 'js/subtitles-octopus-worker.js'
        });
    }
}

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

589
app/js/subtitles-octopus.js Normal file
View file

@ -0,0 +1,589 @@
var SubtitlesOctopus = function (options) {
var supportsWebAssembly = false;
try {
if (typeof WebAssembly === "object"
&& typeof WebAssembly.instantiate === "function") {
const module = new WebAssembly.Module(Uint8Array.of(0x0, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00));
if (module instanceof WebAssembly.Module)
supportsWebAssembly = (new WebAssembly.Instance(module) instanceof WebAssembly.Instance);
}
} catch (e) {
}
console.log("WebAssembly support detected: " + (supportsWebAssembly ? "yes" : "no"));
var self = this;
self.canvas = options.canvas; // HTML canvas element (optional if video specified)
self.lossyRender = options.lossyRender; // Speedup render for heavy subs
self.isOurCanvas = false; // (internal) we created canvas and manage it
self.video = options.video; // HTML video element (optional if canvas specified)
self.canvasParent = null; // (internal) HTML canvas parent element
self.fonts = options.fonts || []; // Array with links to fonts used in sub (optional)
self.availableFonts = options.availableFonts || []; // Object with all available fonts (optional). Key is font name in lower case, value is link: {"arial": "/font1.ttf"}
self.onReadyEvent = options.onReady; // Function called when SubtitlesOctopus is ready (optional)
if (supportsWebAssembly) {
self.workerUrl = options.workerUrl || 'subtitles-octopus-worker.js'; // Link to WebAssembly worker
} else {
self.workerUrl = options.legacyWorkerUrl || 'subtitles-octopus-worker-legacy.js'; // Link to legacy worker
}
self.subUrl = options.subUrl; // Link to sub file (optional if subContent specified)
self.subContent = options.subContent || null; // Sub content (optional if subUrl specified)
self.onErrorEvent = options.onError; // Function called in case of critical error meaning sub wouldn't be shown and you should use alternative method (for instance it occurs if browser doesn't support web workers).
self.debug = options.debug || false; // When debug enabled, some performance info printed in console.
self.lastRenderTime = 0; // (internal) Last time we got some frame from worker
self.pixelRatio = window.devicePixelRatio || 1; // (internal) Device pixel ratio (for high dpi devices)
self.timeOffset = options.timeOffset || 0; // Time offset would be applied to currentTime from video (option)
self.hasAlphaBug = false;
(function() {
if (typeof ImageData.prototype.constructor === 'function') {
try {
// try actually calling ImageData, as on some browsers it's reported
// as existing but calling it errors out as "TypeError: Illegal constructor"
new window.ImageData(new Uint8ClampedArray([0, 0, 0, 0]), 1, 1);
return;
} catch (e) {
console.log("detected that ImageData is not constructable despite browser saying so");
}
}
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
window.ImageData = function () {
var i = 0;
if (arguments[0] instanceof Uint8ClampedArray) {
var data = arguments[i++];
}
var width = arguments[i++];
var height = arguments[i];
var imageData = ctx.createImageData(width, height);
if (data) imageData.data.set(data);
return imageData;
}
})();
self.workerError = function (error) {
console.error('Worker error: ', error);
if (self.onErrorEvent) {
self.onErrorEvent(error);
}
if (!self.debug) {
self.dispose();
throw new Error('Worker error: ' + error);
}
};
// Not tested for repeated usage yet
self.init = function () {
if (!window.Worker) {
self.workerError('worker not supported');
return;
}
// Worker
if (!self.worker) {
self.worker = new Worker(self.workerUrl);
self.worker.onmessage = self.onWorkerMessage;
self.worker.onerror = self.workerError;
}
self.workerActive = false;
self.createCanvas();
self.setVideo(options.video);
self.setSubUrl(options.subUrl);
self.worker.postMessage({
target: 'worker-init',
width: self.canvas.width,
height: self.canvas.height,
URL: document.URL,
currentScript: self.workerUrl,
preMain: true,
fastRender: self.lossyRender,
subUrl: self.subUrl,
subContent: self.subContent,
fonts: self.fonts,
availableFonts: self.availableFonts,
debug: self.debug
});
};
self.createCanvas = function () {
if (!self.canvas) {
if (self.video) {
self.isOurCanvas = true;
self.canvas = document.createElement('canvas');
self.canvas.className = 'libassjs-canvas';
self.canvas.style.display = 'none';
self.canvasParent = document.createElement('div');
self.canvasParent.className = 'libassjs-canvas-parent';
self.canvasParent.appendChild(self.canvas);
if (self.video.nextSibling) {
self.video.parentNode.insertBefore(self.canvasParent, self.video.nextSibling);
}
else {
self.video.parentNode.appendChild(self.canvasParent);
}
}
else {
if (!self.canvas) {
self.workerError('Don\'t know where to render: you should give video or canvas in options.');
}
}
}
self.ctx = self.canvas.getContext('2d');
self.bufferCanvas = document.createElement('canvas');
self.bufferCanvasCtx = self.bufferCanvas.getContext('2d');
// test for alpha bug, where e.g. WebKit can render a transparent pixel
// (with alpha == 0) as non-black which then leads to visual artifacts
self.bufferCanvas.width = 1;
self.bufferCanvas.height = 1;
var testBuf = new Uint8ClampedArray([0, 255, 0, 0]);
var testImage = new ImageData(testBuf, 1, 1);
self.bufferCanvasCtx.clearRect(0, 0, 1, 1);
self.ctx.clearRect(0, 0, 1, 1);
var prePut = self.ctx.getImageData(0, 0, 1, 1).data;
self.bufferCanvasCtx.putImageData(testImage, 0, 0);
self.ctx.drawImage(self.bufferCanvas, 0, 0);
var postPut = self.ctx.getImageData(0, 0, 1, 1).data;
self.hasAlphaBug = prePut[1] != postPut[1];
if (self.hasAlphaBug) {
console.log("Detected a browser having issue with transparent pixels, applying workaround");
}
};
self.setVideo = function (video) {
self.video = video;
if (self.video) {
var timeupdate = function () {
self.setCurrentTime(video.currentTime + self.timeOffset);
}
self.video.addEventListener("timeupdate", timeupdate, false);
self.video.addEventListener("playing", function () {
self.setIsPaused(false, video.currentTime + self.timeOffset);
}, false);
self.video.addEventListener("pause", function () {
self.setIsPaused(true, video.currentTime + self.timeOffset);
}, false);
self.video.addEventListener("seeking", function () {
self.video.removeEventListener("timeupdate", timeupdate);
}, false);
self.video.addEventListener("seeked", function () {
self.video.addEventListener("timeupdate", timeupdate, false);
self.setCurrentTime(video.currentTime + self.timeOffset);
}, false);
self.video.addEventListener("ratechange", function () {
self.setRate(video.playbackRate);
}, false);
self.video.addEventListener("timeupdate", function () {
self.setCurrentTime(video.currentTime + self.timeOffset);
}, false);
self.video.addEventListener("waiting", function () {
self.setIsPaused(true, video.currentTime + self.timeOffset);
}, false);
document.addEventListener("fullscreenchange", self.resizeWithTimeout, false);
document.addEventListener("mozfullscreenchange", self.resizeWithTimeout, false);
document.addEventListener("webkitfullscreenchange", self.resizeWithTimeout, false);
document.addEventListener("msfullscreenchange", self.resizeWithTimeout, false);
window.addEventListener("resize", self.resizeWithTimeout, false);
// Support Element Resize Observer
if (typeof ResizeObserver !== "undefined") {
self.ro = new ResizeObserver(self.resizeWithTimeout);
self.ro.observe(self.video);
}
if (self.video.videoWidth > 0) {
self.resize();
}
else {
self.video.addEventListener("loadedmetadata", function (e) {
e.target.removeEventListener(e.type, arguments.callee);
self.resize();
}, false);
}
}
};
self.getVideoPosition = function () {
var videoRatio = self.video.videoWidth / self.video.videoHeight;
var width = self.video.offsetWidth, height = self.video.offsetHeight;
var elementRatio = width / height;
var realWidth = width, realHeight = height;
if (elementRatio > videoRatio) realWidth = Math.floor(height * videoRatio);
else realHeight = Math.floor(width / videoRatio);
var x = (width - realWidth) / 2;
var y = (height - realHeight) / 2;
return {
width: realWidth,
height: realHeight,
x: x,
y: y
};
};
self.setSubUrl = function (subUrl) {
self.subUrl = subUrl;
};
self.renderFrameData = null;
function renderFrames() {
var data = self.renderFramesData;
var beforeDrawTime = performance.now();
self.ctx.clearRect(0, 0, self.canvas.width, self.canvas.height);
for (var i = 0; i < data.canvases.length; i++) {
var image = data.canvases[i];
self.bufferCanvas.width = image.w;
self.bufferCanvas.height = image.h;
var imageBuffer = new Uint8ClampedArray(image.buffer);
if (self.hasAlphaBug) {
for (var j = 3; j < imageBuffer.length; j = j + 4) {
imageBuffer[j] = (imageBuffer[j] >= 1) ? imageBuffer[j] : 1;
}
}
var imageData = new ImageData(imageBuffer, image.w, image.h);
self.bufferCanvasCtx.putImageData(imageData, 0, 0);
self.ctx.drawImage(self.bufferCanvas, image.x, image.y);
}
if (self.debug) {
var drawTime = Math.round(performance.now() - beforeDrawTime);
console.log(Math.round(data.spentTime) + ' ms (+ ' + drawTime + ' ms draw)');
self.renderStart = performance.now();
}
}
/**
* Lossy Render Mode
*
*/
function renderFastFrames() {
var data = self.renderFramesData;
var beforeDrawTime = performance.now();
self.ctx.clearRect(0, 0, self.canvas.width, self.canvas.height);
for (var i = 0; i < data.bitmaps.length; i++) {
var image = data.bitmaps[i];
self.ctx.drawImage(image.bitmap, image.x, image.y);
}
if (self.debug) {
var drawTime = Math.round(performance.now() - beforeDrawTime);
console.log(data.bitmaps.length + ' bitmaps, libass: ' + Math.round(data.libassTime) + 'ms, decode: ' + Math.round(data.decodeTime) + 'ms, draw: ' + drawTime + 'ms');
self.renderStart = performance.now();
}
}
self.workerActive = false;
self.frameId = 0;
self.onWorkerMessage = function (event) {
//dump('\nclient got ' + JSON.stringify(event.data).substr(0, 150) + '\n');
if (!self.workerActive) {
self.workerActive = true;
if (self.onReadyEvent) {
self.onReadyEvent();
}
}
var data = event.data;
switch (data.target) {
case 'stdout': {
console.log(data.content);
break;
}
case 'console-log': {
console.log.apply(console, JSON.parse(data.content));
break;
}
case 'console-debug': {
console.debug.apply(console, JSON.parse(data.content));
break;
}
case 'console-info': {
console.info.apply(console, JSON.parse(data.content));
break;
}
case 'console-warn': {
console.warn.apply(console, JSON.parse(data.content));
break;
}
case 'console-error': {
console.error.apply(console, JSON.parse(data.content));
break;
}
case 'stderr': {
console.error(data.content);
break;
}
case 'window': {
window[data.method]();
break;
}
case 'canvas': {
switch (data.op) {
case 'getContext': {
self.ctx = self.canvas.getContext(data.type, data.attributes);
break;
}
case 'resize': {
self.resize(data.width, data.height);
break;
}
case 'renderCanvas': {
if (self.lastRenderTime < data.time) {
self.lastRenderTime = data.time;
self.renderFramesData = data;
window.requestAnimationFrame(renderFrames);
}
break;
}
case 'renderFastCanvas': {
if (self.lastRenderTime < data.time) {
self.lastRenderTime = data.time;
self.renderFramesData = data;
window.requestAnimationFrame(renderFastFrames);
}
break;
}
case 'setObjectProperty': {
self.canvas[data.object][data.property] = data.value;
break;
}
default:
throw 'eh?';
}
break;
}
case 'tick': {
self.frameId = data.id;
self.worker.postMessage({
target: 'tock',
id: self.frameId
});
break;
}
case 'custom': {
if (self['onCustomMessage']) {
self['onCustomMessage'](event);
} else {
throw 'Custom message received but client onCustomMessage not implemented.';
}
break;
}
case 'setimmediate': {
self.worker.postMessage({
target: 'setimmediate'
});
break;
}
case 'get-events': {
console.log(data.target);
console.log(data.events);
break;
}
case 'get-styles': {
console.log(data.target);
console.log(data.styles);
break;
}
default:
throw 'what? ' + data.target;
}
};
self.resize = function (width, height, top, left) {
var videoSize = null;
top = top || 0;
left = left || 0;
if ((!width || !height) && self.video) {
videoSize = self.getVideoPosition();
width = videoSize.width * self.pixelRatio;
height = videoSize.height * self.pixelRatio;
var offset = self.canvasParent.getBoundingClientRect().top - self.video.getBoundingClientRect().top;
top = videoSize.y - offset;
left = videoSize.x;
}
if (!width || !height) {
if (!self.video) {
console.error('width or height is 0. You should specify width & height for resize.');
}
return;
}
if (
self.canvas.width != width ||
self.canvas.height != height ||
self.canvas.style.top != top ||
self.canvas.style.left != left
) {
self.canvas.width = width;
self.canvas.height = height;
if (videoSize != null) {
self.canvasParent.style.position = 'relative';
self.canvas.style.display = 'block';
self.canvas.style.position = 'absolute';
self.canvas.style.width = videoSize.width + 'px';
self.canvas.style.height = videoSize.height + 'px';
self.canvas.style.top = top + 'px';
self.canvas.style.left = left + 'px';
self.canvas.style.pointerEvents = 'none';
}
self.worker.postMessage({
target: 'canvas',
width: self.canvas.width,
height: self.canvas.height
});
}
};
self.resizeWithTimeout = function () {
self.resize();
setTimeout(self.resize, 100);
};
self.runBenchmark = function () {
self.worker.postMessage({
target: 'runBenchmark'
});
};
self.customMessage = function (data, options) {
options = options || {};
self.worker.postMessage({
target: 'custom',
userData: data,
preMain: options.preMain
});
};
self.setCurrentTime = function (currentTime) {
self.worker.postMessage({
target: 'video',
currentTime: currentTime
});
};
self.setTrackByUrl = function (url) {
self.worker.postMessage({
target: 'set-track-by-url',
url: url
});
};
self.setTrack = function (content) {
self.worker.postMessage({
target: 'set-track',
content: content
});
};
self.freeTrack = function (content) {
self.worker.postMessage({
target: 'free-track'
});
};
self.render = self.setCurrentTime;
self.setIsPaused = function (isPaused, currentTime) {
self.worker.postMessage({
target: 'video',
isPaused: isPaused,
currentTime: currentTime
});
};
self.setRate = function (rate) {
self.worker.postMessage({
target: 'video',
rate: rate
});
};
self.dispose = function () {
self.worker.postMessage({
target: 'destroy'
});
self.worker.terminate();
self.workerActive = false;
// Remove the canvas element to remove residual subtitles rendered on player
if (self.video) {
self.video.parentNode.removeChild(self.canvasParent);
}
};
self.createEvent = function (event) {
self.worker.postMessage({
target: 'create-event',
event: event
});
};
self.getEvents = function () {
self.worker.postMessage({
target: 'get-events'
});
};
self.setEvent = function (event, index) {
self.worker.postMessage({
target: 'set-event',
event: event,
index: index
});
};
self.removeEvent = function (index) {
self.worker.postMessage({
target: 'remove-event',
index: index
});
};
self.createStyle = function (style) {
self.worker.postMessage({
target: 'create-style',
style: style
});
};
self.getStyles = function () {
self.worker.postMessage({
target: 'get-styles'
});
};
self.setStyle = function (style, index) {
self.worker.postMessage({
target: 'set-style',
style: style,
index: index
});
};
self.removeStyle = function (index) {
self.worker.postMessage({
target: 'remove-style',
index: index
});
};
self.init();
};
// Browser hook: if the page defined SubtitlesOctopusOnLoad before this script
// ran, invoke it now that the constructor is available.
if (typeof SubtitlesOctopusOnLoad == 'function') {
SubtitlesOctopusOnLoad();
}
// CommonJS export for bundlers; a no-op when loaded via a plain <script> tag.
if (typeof exports !== 'undefined') {
if (typeof module !== 'undefined' && module.exports) {
exports = module.exports = SubtitlesOctopus
}
}

View file

@ -1,4 +1,4 @@
let client = new WebTorrent()
client = new WebTorrent()
window.onbeforeunload = () => {
client.torrents[0] ? client.torrents[0].store.destroy() : ""
client.torrents[0] ? client.torrents[0].destroy() : ""
@ -62,7 +62,7 @@ async function addTorrent(magnet) {
resetVideo()
selected ? selPlaying(selected) : ""
await sw
client.add(magnet, async function (torrent) {
client.add(magnet, function (torrent) {
torrent.on('noPeers', function () {
if (client.torrents[0].progress != 1) {
halfmoon.initStickyAlert({
@ -138,7 +138,7 @@ function serveFile(file, req) {
res.body = req.method === 'HEAD' ? '' : 'stream'
// parser is really a passthrough mkv stream now
let stream = file.createReadStream(range)
parseSubs(stream)
subStream(stream)
return [res, req.method === 'GET' && subtitleStream || stream]
}

56
app/test.ass Normal file
View file

@ -0,0 +1,56 @@
[Script Info]
Title:
ScriptType: v4.00+
WrapStyle: 0
PlayResX: 640
PlayResY: 480
ScaledBorderAndShadow: yes
[V4+ Styles]
Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
Style: Default,Arial,20,&H00FFFFFF,&H000000FF,&H00000000,&H00000000,0,0,0,0,100,100,0,0,1,2,2,2,10,10,10,0
Style: Alt,Times New Roman,40,&H00FFFFFF,&H000000FF,&H00000000,&H00000000,0,0,0,0,100,100,0,0,1,2,2,8,10,10,10,0
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
Comment: 0,0:00:00.00,0:00:05.00,Default,,0000,0000,0000,,This line is a comment and should not display on rendered output. This file is intended to be rendered on a 640x480 video, preferably a single solid colour, or maybe a checkerboard pattern.
Dialogue: 0,0:00:05.00,0:00:07.00,Default,,0000,0000,0000,,This and the previous line should both show at the bottom of the video in 20 pixel high white Arial with a 2 pixel black outline and 2 pixel offset shadow. This line is long so it should automatically be broken into several lines that are approximately even length, with upper lines being longer.
Dialogue: 0,0:00:07.00,0:00:09.00,Default,,0000,0000,0000,,Now going to test conflict resolution. {By the way, this is a common abuse, "inline-comments" in override tag groups. They should not be rendered and not generate errors, just be ignored.}
Dialogue: 0,0:00:07.50,0:00:09.00,Default,,0000,0000,0000,,This line should be above the previous.
Dialogue: 0,0:00:09.00,0:00:11.00,Default,,0000,0000,0000,,{\an1}This line should be lower-left aligned using the \an1 override.
Dialogue: 0,0:00:11.00,0:00:13.00,Default,,0000,0000,0000,,{\an9}Upper right
Dialogue: 0,0:00:12.00,0:00:14.00,Alt,,0000,0000,0000,,This one should be below the "Upper right" line and be centered on screen. It should also be in 40 pixel high Times New Roman.
Dialogue: 0,0:00:14.00,0:00:16.00,Alt,,0000,0000,0000,,{\fs10}This should also be a toptitle in Times New Roman, but much smaller. Using an override tag to change size.
Dialogue: 0,0:00:16.00,0:00:18.00,Alt,,0000,0000,0000,,{\a5}Top-left using the legacy \a5 tag
Dialogue: 0,0:00:18.00,0:00:20.00,Default,,0000,0000,0000,,{\an5}This line is centered on screen.\NAt the bottom of the screen should be two lines in different layers, they are rendered on top of each other because conflict resolution only considers lines in the same layer.
Dialogue: 0,0:00:18.00,0:00:20.00,Default,,0000,0000,0000,,This is in layer 0
Dialogue: 1,0:00:18.00,0:00:20.00,Default,,0000,0000,0000,,And this line is in layer 1
Dialogue: 0,0:00:20.00,0:00:22.00,Default,,0000,0000,0000,,Another line in layer 0
Dialogue: 1,0:00:20.00,0:00:22.00,Default,,0050,0000,0020,,And this line is in layer 1, but has changed its margins so it isn't exactly centered, and it's raised above the other line.
Dialogue: 0,0:00:22.00,0:00:24.00,Default,,0000,0000,0000,,Switching {\fnTimes New Roman}font inline, {\r}and resetting styles
Dialogue: 0,0:00:24.00,0:00:26.00,Default,,0000,0000,0000,,Also {\rAlt}switching to a different style {\r}inline
Dialogue: 0,0:00:26.00,0:00:28.00,Default,,0000,0000,0000,,{\an5\pos(258,131)}Positioning... this line should be in an odd place
Dialogue: 0,0:00:26.00,0:00:28.00,Default,,0000,0000,0000,,{\an5\pos(222,338)\frz29.559}So should this one, and be rotated
Dialogue: 0,0:00:28.00,0:00:30.00,Default,,0000,0000,0000,,{\pos(371,314)\frz338.403\frx42\fry30\org(187,165)}Some more rotation tricks, this time in 3D and with a different origin
Dialogue: 0,0:00:30.00,0:00:32.00,Default,,0000,0000,0000,,{\an5\t(\frz360)}This line is animated and rotates 360 degrees over its duration
Dialogue: 0,0:00:32.00,0:00:34.00,Default,,0000,0000,0000,,{\an5\move(182,157,414,311)}This line moves
Dialogue: 0,0:00:34.00,0:00:36.00,Default,,0000,0000,0000,,{\an5\clip(126,192,487,241)}This line is only partially visible\Nso if you can see this, \clip is handled incorrectly
Dialogue: 0,0:00:36.00,0:00:38.00,Default,,0000,0000,0000,,{\an5\clip(m 178 212 l 163 249 212 302 257 232 309 249 366 241 392 285 473 267 487 210 421 241 353 219 207 258)}And this line has a funnily-shaped clip area
Dialogue: 0,0:00:38.00,0:00:40.00,Default,,0000,0000,0000,,{\an5\pos(321,193)}Here should be a pretty feather with a thin border and no shadow:
Dialogue: 0,0:00:38.00,0:00:40.00,Default,,0000,0000,0000,,{\an5\bord1\shad0\p1\fscx300\fscy300\pos(316,312)}m 0 0 b 33 -3 48 -7 61 -19 b 53 -7 31 -1 1 2 m 9 0 b 9 -5 10 -6 14 -12 b 9 -5 10 -3 10 0 m 10 -1 b 10 -5 11 -8 16 -12 b 17 -12 18 -13 22 -14 b 18 -10 16 -6 15 -1 b 17 -6 19 -10 23 -14 l 26 -15 b 22 -11 20 -6 19 -2 b 24 -11 27 -14 28 -15 b 31 -16 35 -16 39 -16 b 37 -14 36 -10 36 -5 b 36 -9 38 -14 39 -16 b 41 -16 43 -16 45 -15 b 44 -13 43 -10 43 -7 l 29 -3 m 43 -7 b 44 -11 45 -13 47 -15 b 46 -13 46 -11 46 -9 b 46 -11 47 -13 49 -15 b 51 -15 48 -13 48 -9 b 49 -11 49 -14 52 -16 b 55 -16 53 -17 51 -11 l 48 -8 m 54 -13 b 53 -14 54 -15 54 -16 b 54 -15 54 -14 55 -13 b 57 -13 60 -12 62 -14 b 61 -12 55 -12 53 -12 b 55 -11 56 -10 61 -10 b 61 -7 53 -9 52 -11 b 53 -8 55 -7 58 -6 b 57 -5 56 -4 54 -3 b 49 -5 48 -6 47 -9 b 47 -6 50 -4 53 -2 b 49 -3 47 -6 45 -8 b 46 -6 49 -3 52 -1 b 46 2 45 3 43 3 b 40 0 39 -3 40 -6 b 39 -3 39 1 42 3 b 40 6 36 1 34 -4 b 35 -1 36 2 38 5 b 32 7 26 9 22 8 b 19 5 17 2 16 -1 b 17 3 19 5 20 7 b 17 4 16 2 15 0 b 15 2 17 5 19 8 b 15 9 13 3 12 0 l 25 -2 l 42 -7 m 12 0 b 13 3 13 7 17 9 b 12 7 10 1 10 0
Dialogue: 0,0:00:40.00,0:00:42.00,Default,,0000,0000,0000,,Some other basic font style tests:\NNormal, {\b1}Boldface{\r}, {\i1}Italics{\r}, {\u1}Underline{\r}, {\s1}Strikethrough
Dialogue: 0,0:00:42.00,0:00:44.00,Default,,0000,0000,0000,,There should be no linebreak here,\nbut there should be one here\Nso this is on a separate line.
Dialogue: 0,0:00:44.00,0:00:46.00,Default,,0000,0000,0000,,This line is pushed left by adding some hard spaces to the right\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h\h
Dialogue: 0,0:00:44.00,0:00:46.00,Default,,0000,0000,0000,, Regular spaces at line start/end are ignored, so this line is centered
Dialogue: 0,0:00:46.00,0:00:48.00,Default,,0000,0000,0000,,{\an8\q2}This is a long line but it is not broken into several because it's using the \q override tag which changes the line-wrapping mode for a single line. The tag can take several values, 0 is the default which is "smart wrapping" with top lines longer, 1 is regular wrapping, 2 is no wrapping and 3 is smart-wrapping with bottom lines longer.
Dialogue: 0,0:00:46.00,0:00:48.00,Default,,0000,0000,0000,,{\an5\q1}This is a long line using the \q1 override tag to get "dumb" wrapping, make as long lines as possible and first wrap when it'd go off screen. This is mostly visible when the last line generated is short.
Dialogue: 0,0:00:46.00,0:00:48.00,Default,,0000,0000,0000,,{\q3}And this line uses the \q3 wrapping style, which is similar to the \q0 default smart-wrap style, but has lower lines longer than the upper ones.
Dialogue: 0,0:00:48.00,0:00:50.00,Default,,0000,0000,0000,,{\an5}{\bord0\t(\bord10)}This text has a growing border\N{\r\shad0\t(\shad10)}This text has an increasing shadow\N{\r\t(\fry360)}This text is rotating around the Y axis\N{\r\t(\frz720)}This text rotates {\t(\frx360)}and this one even more
Dialogue: 0,0:00:50.00,0:00:52.00,Default,,0000,0000,0000,,{\an5\bord0\shad0}All of this has no border or shadow...\N{\1c&H0000FF&}This is bright red, {\1c&H00FF00&}green, {\1c&HFF0000}blue{\1c&HFFFFFF&}\N{\1a&H80&}50% transparent
Dialogue: 0,0:00:52.00,0:00:54.00,Default,,0000,0000,0000,,{\fsp10}Large inter-letter spacing
Dialogue: 0,0:00:54.00,0:00:56.00,Default,,0000,0000,0000,,{\shad0}No shadow here, just regular border, {\be1}but this is \be1 blurred edges!
Dialogue: 0,0:00:56.00,0:00:58.00,Default,,0000,0000,0000,,{\t(1000,2000,\alpha&HFF&)}Fading out after 1 sec on screen, using \alpha. Everything should fade.
Dialogue: 0,0:00:58.00,0:01:00.00,Default,,0000,0000,0000,,{\fad(1000,1000)}Fading in and out using \fad
Dialogue: 0,0:01:00.00,0:01:02.00,Default,,0000,0000,0000,,{\k10}And {\k5}now {\k20}for {\kf50}ka{\kf20}ra{\K70}o{\K10}ke{\k0}!
Dialogue: 0,0:00:00.00,0:00:05.00,Default,,0000,0000,0000,,This is a test of the ASS format and some basic features in it.
Dialogue: 0,0:01:02.00,0:01:04.00,Default,,0000,0000,0000,,{\an5\t()\clip(69,215,573,267)\t(1000,2000,3,\clip(573,267,573,267))}And the last thing on the programme, an animated rectangular \clip, delayed by 1 second. And it even uses acceleration...!