feat: monorepo

This commit is contained in:
ThaUnknown 2023-11-12 14:43:06 +01:00
parent c04db6548e
commit 832b3bb33d
97 changed files with 2466 additions and 1818 deletions

10
capacitor/.gitignore vendored Normal file
View file

@ -0,0 +1,10 @@
/node_modules/
/public/build/
/public
.routify/
android/
ios/
package-lock.json
.env
.DS_Store

21
capacitor/LICENSE Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 t. Macleod Sawyer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -0,0 +1,20 @@
// Capacitor configuration for the Miru mobile (Android/iOS) wrapper.
const config = {
appId: 'watch.miru',
appName: 'Miru',
// Directory whose built contents get copied into the native shell.
webDir: 'public',
bundledWebRuntime: false,
plugins: {
SplashScreen: {
// Skip the native splash screen entirely.
launchShowDuration: 0
}
},
// remove server section before making production build
server: {
// for android only, below settings will work out of the box
// for iOS or both, change the url to http://your-device-ip
// To discover your workstation IP, just run ifconfig
// NOTE: 10.0.2.2 is the Android emulator's alias for the host machine;
// 5001 matches the "dev:vite" port in capacitor/package.json.
url: 'http://10.0.2.2:5001',
cleartext: true
}
}
module.exports = config

21
capacitor/index.html Normal file
View file

@ -0,0 +1,21 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="apple-touch-icon" sizes="152x152" href="/assets/favicon/apple-touch-icon.png">
<link rel="icon" type="image/png" sizes="32x32" href="/assets/favicon/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/assets/favicon/favicon-16x16.png">
<link rel="manifest" href="/assets/favicon/site.webmanifest">
<link rel="mask-icon" href="/assets/favicon/safari-pinned-tab.svg" color="#5bbad5">
<meta name="msapplication-TileColor" content="#da532c">
<meta name="theme-color" content="#ffffff">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Svelte_Capacitor v2.0.0</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.ts"></script>
</body>
</html>

26
capacitor/package.json Normal file
View file

@ -0,0 +1,26 @@
{
"scripts": {
"build:app": "vite build",
"build:android": "run-s build:app cap-run:android",
"build:ios": "run-s build:app cap-run:ios",
"cap-run:android": "cap sync android && cap open android",
"cap-run:ios": "cap sync ios && cap open ios",
"dev:ios": "run-p dev:start cap-run:ios",
"dev:android": "run-p dev:start cap-run:android",
"dev:preview": "vite preview",
"dev:start": "run-p dev:vite",
"dev:vite": "vite --host --port 5001"
},
"devDependencies": {
"cordova-res": "^0.15.4"
},
"dependencies": {
"@capacitor/android": "^5.5.1",
"@capacitor/cli": "^5.5.1",
"@capacitor/core": "^5.5.1",
"@capacitor/ios": "^5.5.1",
"cordova-plugin-chrome-apps-common": "^1.0.7",
"cordova-plugin-chrome-apps-sockets-tcp": "github:KoenLav/cordova-plugin-chrome-apps-sockets-tcp",
"cordova-plugin-chrome-apps-sockets-udp": "^1.3.0"
}
}

1
capacitor/src/App.svelte Normal file
View file

@ -0,0 +1 @@
<h1>uwu</h1>

7
capacitor/src/main.js Normal file
View file

@ -0,0 +1,7 @@
// Application entry point: mount the root Svelte component onto #app.
import App from './App.svelte'

export default new App({
  target: document.getElementById('app')
})

View file

@ -0,0 +1,54 @@
// Webpack build that bundles WebTorrent as a single ES module for the
// chrome-app (Capacitor) socket environment, with node built-ins polyfilled.
import webpack from 'webpack'
import TerserPlugin from 'terser-webpack-plugin'
import info from 'webtorrent/package.json' assert { type: 'json' }
/** @type {import('webpack').Configuration} */
export default {
entry: './index.js',
devtool: 'source-map',
resolve: {
// Prefer the "chromeapp" entry declared in dependency package.json files.
aliasFields: ['chromeapp'],
alias: {
// Module remaps published by webtorrent itself for chrome-app builds.
...info.chromeapp,
path: 'path-esm',
stream: 'stream-browserify',
timers: 'timers-browserify',
crypto: 'crypto-browserify',
buffer: 'buffer',
querystring: 'querystring',
zlib: '/polyfills/inflate-sync-web.js'
}
},
output: {
chunkFormat: 'module',
filename: 'webtorrent.chromeapp.js',
library: {
// Emit an importable ES module (requires experiments.outputModule below).
type: 'module'
}
},
mode: 'production',
target: 'web',
experiments: {
outputModule: true
},
plugins: [
// Inject shims wherever `process` or `Buffer` are referenced.
new webpack.ProvidePlugin({
process: '/polyfills/process-fast.js',
Buffer: ['buffer', 'Buffer']
}),
new webpack.DefinePlugin({
global: 'globalThis'
})
],
optimization: {
minimize: true,
minimizer: [new TerserPlugin({
terserOptions: {
format: {
// Strip all comments from the minified bundle.
comments: false
}
},
extractComments: false
})]
}
}

View file

@ -1,56 +1,56 @@
<script context='module'>
import { setContext } from 'svelte'
import { writable } from 'simple-store-svelte'
import { alRequest } from '@/modules/anilist.js'
// Active page id and the media shown in the ViewAnime overlay (null = closed).
export const page = writable('home')
export const view = writable(null)
// Open the overlay for an AniList id: clear first so the view re-renders,
// then set the freshly fetched media.
export async function handleAnime (anime) {
view.set(null)
view.set((await alRequest({ method: 'SearchIDSingle', id: anime })).data.Media)
}
// IPC hooks from the main process: deep-link into an anime, or jump to the schedule page.
window.IPC.on('open-anime', handleAnime)
window.IPC.on('schedule', () => {
page.set('schedule')
})
</script>
<script>
import Sidebar from './components/Sidebar.svelte'
import Router from './Router.svelte'
import ViewAnime from './views/ViewAnime/ViewAnime.svelte'
import RSSView from './views/RSSView.svelte'
import Menubar from './components/Menubar.svelte'
import IspBlock from './views/IspBlock.svelte'
import { Toaster } from 'svelte-sonner'
import Logout from './components/Logout.svelte'
// Expose the overlay store to descendants so any component can open ViewAnime.
setContext('view', view)
</script>
<!-- App shell: overlays (IspBlock/ViewAnime/Logout), menubar, sidebar, routed content. -->
<div class='page-wrapper with-sidebar with-transitions bg-dark' data-sidebar-type='overlayed-all'>
<IspBlock />
<Menubar bind:page={$page} />
<ViewAnime />
<Logout />
<Sidebar bind:page={$page} />
<div class='overflow-hidden content-wrapper h-full z-10'>
<Toaster visibleToasts={6} position='top-right' theme='dark' richColors duration={10000} closeButton />
<RSSView />
<Router bind:page={$page} />
</div>
</div>
<style>
.content-wrapper {
will-change: width;
top: 0 !important;
}
/* Pin content beside the minimised sidebar and disable its layout transitions. */
.page-wrapper > .content-wrapper {
margin-left: var(--sidebar-minimised) !important;
position: unset !important;
width: calc(100% - var(--sidebar-minimised)) !important;
transition: none !important;
}
</style>
<script context='module'>
import { setContext } from 'svelte'
import { writable } from 'simple-store-svelte'
import { alRequest } from '@/modules/anilist.js'
export const page = writable('home')
export const view = writable(null)
export async function handleAnime (anime) {
view.set(null)
view.set((await alRequest({ method: 'SearchIDSingle', id: anime })).data.Media)
}
window.IPC.on('open-anime', handleAnime)
window.IPC.on('schedule', () => {
page.set('schedule')
})
</script>
<script>
import Sidebar from './components/Sidebar.svelte'
import Router from './Router.svelte'
import ViewAnime from './views/ViewAnime/ViewAnime.svelte'
import RSSView from './views/RSSView.svelte'
import Menubar from './components/Menubar.svelte'
import IspBlock from './views/IspBlock.svelte'
import { Toaster } from 'svelte-sonner'
import Logout from './components/Logout.svelte'
setContext('view', view)
</script>
<div class='page-wrapper with-sidebar with-transitions bg-dark' data-sidebar-type='overlayed-all'>
<IspBlock />
<Menubar bind:page={$page} />
<ViewAnime />
<Logout />
<Sidebar bind:page={$page} />
<div class='overflow-hidden content-wrapper h-full z-10'>
<Toaster visibleToasts={6} position='top-right' theme='dark' richColors duration={10000} closeButton />
<RSSView />
<Router bind:page={$page} />
</div>
</div>
<style>
.content-wrapper {
will-change: width;
top: 0 !important;
}
.page-wrapper > .content-wrapper {
margin-left: var(--sidebar-minimised) !important;
position: unset !important;
width: calc(100% - var(--sidebar-minimised)) !important;
transition: none !important;
}
</style>

15
common/jsconfig.json Normal file
View file

@ -0,0 +1,15 @@
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"@/*": ["./*"],
},
"checkJs": true,
"target": "ESNext",
"moduleResolution": "node",
"module": "ESNext",
"types": ["./types.d.ts"],
"allowSyntheticDefaultImports": true
},
"exclude": ["node_modules/**", "**/node_modules", "dist", "build"]
}

View file

@ -1,460 +1,460 @@
import { DOMPARSER, PromiseBatch, binarySearch } from './util.js'
import { alRequest, alSearch } from './anilist.js'
import _anitomyscript from 'anitomyscript'
import { toast } from 'svelte-sonner'
import SectionsManager from './sections.js'
import { page } from '@/App.svelte'
import clipboard from './clipboard.js'
import { search, key } from '@/views/Search.svelte'
import { playAnime } from '../views/RSSView.svelte'
// Quick check for common raster image extensions in pasted text.
const imageRx = /\.(jpeg|jpg|gif|png|webp)/i
// Pasted/dropped image files: run each through trace.moe reverse-image search.
clipboard.on('files', ({ detail }) => {
for (const file of detail) {
if (file.type.startsWith('image')) {
toast.promise(traceAnime(file), {
description: 'You can also paste an URL to an image.',
loading: 'Looking up anime for image...',
success: 'Found anime for image!',
error: 'Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.'
})
}
}
})
// Pasted text/HTML: extract an image URL (first <img> src, or a bare image URL) and search with it.
clipboard.on('text', ({ detail }) => {
for (const { type, text } of detail) {
let src = null
if (type === 'text/html') {
src = DOMPARSER(text, 'text/html').querySelectorAll('img')[0]?.src
} else if (imageRx.exec(text)) {
src = text
}
if (src) {
toast.promise(traceAnime(src), {
description: 'You can also paste an URL to an image.',
loading: 'Looking up anime for image...',
success: 'Found anime for image!',
error: 'Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.'
})
}
}
})
// Reverse-image search via trace.moe: accepts an image URL string or a Blob,
// then opens the search page populated with the matched AniList entries.
// Throws when trace.moe returns no results (callers surface this via toast.promise).
export async function traceAnime (image) { // WAIT lookup logic
let options
let url = `https://api.trace.moe/search?cutBorders&url=${image}`
if (image instanceof Blob) {
// Blobs are uploaded directly as the POST body instead of referenced by URL.
options = {
method: 'POST',
body: image,
headers: { 'Content-type': image.type }
}
url = 'https://api.trace.moe/search'
}
const res = await fetch(url, options)
const { result } = await res.json()
if (result?.length) {
const ids = result.map(({ anilist }) => anilist)
// Install a lazy loader: the search page calls load() with paging variables.
search.value = {
clearNext: true,
load: (page = 1, perPage = 50, variables = {}) => {
const res = alRequest({ method: 'SearchIDS', page, perPage, id: ids, ...SectionsManager.sanitiseObject(variables) }).then(res => {
for (const index in res.data?.Page?.media) {
const media = res.data.Page.media[index]
// Re-attach trace.moe's per-match info (episode, similarity, preview) to the AniList media.
const counterpart = result.find(({ anilist }) => anilist === media.id)
res.data.Page.media[index] = {
media,
episode: counterpart.episode,
similarity: counterpart.similarity,
episodeData: {
image: counterpart.image,
video: counterpart.video
}
}
}
// Best match first.
res.data?.Page?.media.sort((a, b) => b.similarity - a.similarity)
return res
})
return SectionsManager.wrapResponse(res, result.length, 'episode')
}
}
key.value = {}
page.value = 'search'
} else {
throw new Error('Search Failed \n Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.')
}
}
/**
 * Builds a complete, gapless chapter list (times in ms) for the player timeline
 * from AniSkip skip-time results.
 * @param {Array<{interval: {startTime: number, endTime: number}, skipType: string, episodeLength: number}>} results
 * @param {number} duration - actual episode duration in seconds
 * @returns {Array<{start: number, end: number, text: string}>} chapters sorted by start time
 */
function constructChapters (results, duration) {
  const chapters = results.map(result => {
    // AniSkip timestamps are relative to the episodeLength it was queried with;
    // shift by the difference so they line up with the local file's duration.
    const diff = duration - result.episodeLength
    return {
      start: (result.interval.startTime + diff) * 1000,
      end: (result.interval.endTime + diff) * 1000,
      text: result.skipType.toUpperCase()
    }
  })
  const ed = chapters.find(({ text }) => text === 'ED')
  const recap = chapters.find(({ text }) => text === 'RECAP')
  if (recap) recap.text = 'Recap'
  // FIX: compare start times — the previous comparator (a - b) subtracted the
  // chapter objects themselves, always yielding NaN, so the list never sorted.
  chapters.sort((a, b) => a.start - b.start)
  // Lead-in before the first known chapter: "Intro" when the first chapter is the OP.
  if ((chapters[0].start | 0) !== 0) {
    chapters.unshift({ start: 0, end: chapters[0].start, text: chapters[0].text === 'OP' ? 'Intro' : 'Episode' })
  }
  // Tail chapter, only when more than ~5s remains: a preview after an ED,
  // otherwise more episode content.
  if (ed) {
    if ((ed.end | 0) + 5000 - duration * 1000 < 0) {
      chapters.push({ start: ed.end, end: duration * 1000, text: 'Preview' })
    }
  } else if ((chapters[chapters.length - 1].end | 0) + 5000 - duration * 1000 < 0) {
    chapters.push({
      start: chapters[chapters.length - 1].end,
      end: duration * 1000,
      text: 'Episode'
    })
  }
  // Fill interior gaps between consecutive chapters with generic "Episode" spans.
  for (let i = 0, len = chapters.length - 2; i <= len; ++i) {
    const current = chapters[i]
    const next = chapters[i + 1]
    if ((current.end | 0) !== (next.start | 0)) {
      chapters.push({
        start: current.end,
        end: next.start,
        text: 'Episode'
      })
    }
  }
  chapters.sort((a, b) => a.start - b.start)
  return chapters
}
/**
 * Fetches AniSkip op/ed/recap skip times for an episode and converts them to
 * player chapters. Queries twice — with the exact episode length and with
 * length 0 — and prefers the length-accurate result per skip type.
 * @param {object} file - resolved file; reads file.media.media.idMal and file.media.episode
 * @param {number} duration - episode duration in seconds
 * @returns {Promise<Array<{start: number, end: number, text: string}>>} [] when AniSkip has no data
 */
export async function getChaptersAniSkip (file, duration) {
  const resAccurate = await fetch(`https://api.aniskip.com/v2/skip-times/${file.media.media.idMal}/${file.media.episode}/?episodeLength=${duration}&types=op&types=ed&types=recap`)
  const jsonAccurate = await resAccurate.json()
  const resRough = await fetch(`https://api.aniskip.com/v2/skip-times/${file.media.media.idMal}/${file.media.episode}/?episodeLength=0&types=op&types=ed&types=recap`)
  const jsonRough = await resRough.json()
  const map = {}
  // FIX: AniSkip can answer without a results array (e.g. nothing found);
  // spreading undefined would throw, so default each to [].
  // Accurate results are spread first so ||= keeps them over the rough ones.
  for (const result of [...(jsonAccurate.results ?? []), ...(jsonRough.results ?? [])]) {
    map[result.skipType] ||= result
  }
  const results = Object.values(map)
  if (!results.length) return []
  return constructChapters(results, duration)
}
export function getMediaMaxEp (media, playable) {
if (playable) {
return media.nextAiringEpisode?.episode - 1 || media.airingSchedule?.nodes?.[0]?.episode - 1 || media.episodes
} else {
return media.episodes || media.nextAiringEpisode?.episode - 1 || media.airingSchedule?.nodes?.[0]?.episode - 1
}
}
// resolve anime name based on file name and store it
// Ordinal suffixes for rewriting " S2" as "2nd Season" etc.
const postfix = {
1: 'st',
2: 'nd',
3: 'rd'
}
// Tries progressively looser AniList name searches for a parsed title
// (season-notation rewrites, stripping "-"/":"/"(TV)", finally isAdult)
// and caches the first hit in `relations` keyed by getRelationKey().
async function resolveTitle (parseObject) {
const name = parseObject.anime_title
const method = { name, method: 'SearchName', perPage: 10, status: ['RELEASING', 'FINISHED'], sort: 'SEARCH_MATCH' }
if (parseObject.anime_year) method.year = parseObject.anime_year
// inefficient but readable
let media = null
try {
// change S2 into Season 2 or 2nd Season
const match = method.name.match(/ S(\d+)/)
const oldname = method.name
if (match) {
if (Number(match[1]) === 1) { // if this is S1, remove the " S1" or " S01"
method.name = method.name.replace(/ S(\d+)/, '')
media = (await alSearch(method)).data.Page.media[0]
} else {
// try "Nth Season" first, then fall back to "Season N" on the original name
method.name = method.name.replace(/ S(\d+)/, ` ${Number(match[1])}${postfix[Number(match[1])] || 'th'} Season`)
media = (await alSearch(method)).data.Page.media[0]
if (!media) {
method.name = oldname.replace(/ S(\d+)/, ` Season ${Number(match[1])}`)
media = (await alSearch(method)).data.Page.media[0]
}
}
} else {
media = (await alSearch(method)).data.Page.media[0]
}
// remove - :
if (!media) {
const match = method.name.match(/[-:]/g)
if (match) {
method.name = method.name.replace(/[-:]/g, '')
media = (await alSearch(method)).data.Page.media[0]
}
}
// remove (TV)
if (!media) {
const match = method.name.match(/\(TV\)/)
if (match) {
method.name = method.name.replace('(TV)', '')
media = (await alSearch(method)).data.Page.media[0]
}
}
// check adult
if (!media) {
method.isAdult = true
media = (await alSearch(method)).data.Page.media[0]
}
} catch (e) { } // best-effort: a failed lookup simply leaves this title unresolved
if (media) relations[getRelationKey(parseObject)] = media
}
// utility method for correcting anitomyscript woes for what's needed
// Normalises parse results: expands compact SxxEyy notation, extracts a
// plausible year (1950..next year) from the title, and re-appends ' S<n>'
// for seasons > 1 so later searches land on the correct season's entry.
export async function anitomyscript (...args) {
// @ts-ignore
const res = await _anitomyscript(...args)
// a single name yields one object, multiple yield an array — always use an array
const parseObjs = Array.isArray(res) ? res : [res]
for (const obj of parseObjs) {
const seasonMatch = obj.anime_title.match(/S(\d{2})E(\d{2})/)
if (seasonMatch) {
obj.anime_season = seasonMatch[1]
obj.episode_number = seasonMatch[2]
obj.anime_title = obj.anime_title.replace(/S(\d{2})E(\d{2})/, '')
}
const yearMatch = obj.anime_title.match(/ (19[5-9]\d|20\d{2})/)
if (yearMatch && Number(yearMatch[1]) <= (new Date().getUTCFullYear() + 1)) {
obj.anime_year = yearMatch[1]
obj.anime_title = obj.anime_title.replace(/ (19[5-9]\d|20\d{2})/, '')
}
if (Number(obj.anime_season) > 1) obj.anime_title += ' S' + obj.anime_season
}
return parseObjs
}
// Cache key for a parse result: title plus the release year when one was parsed.
function getRelationKey (obj) {
  return obj.anime_year ? `${obj.anime_title}${obj.anime_year}` : obj.anime_title
}
// TODO: anidb aka true episodes need to be mapped to anilist episodes a bit better
// Resolves a file name (or an array of names) into { media, episode, parseObject, failed }
// entries: parses with anitomyscript, batch-resolves unknown titles into `relations`,
// then maps absolute episode numbers/ranges onto the right season via resolveSeason.
export async function resolveFileMedia (fileName) {
const parseObjs = await anitomyscript(fileName)
// batches promises in 10 at a time, because of CF burst protection, which still sometimes gets triggered :/
const uniq = {}
for (const obj of parseObjs) {
const key = getRelationKey(obj)
if (key in relations) continue
uniq[key] = obj
}
await PromiseBatch(resolveTitle, Object.values(uniq), 10)
const fileMedias = []
for (const parseObj of parseObjs) {
let failed = false
let episode
let media = relations[getRelationKey(parseObj)]
// resolve episode, if movie, dont.
const maxep = media?.nextAiringEpisode?.episode || media?.episodes
if ((media?.format !== 'MOVIE' || maxep) && parseObj.episode_number) {
if (Array.isArray(parseObj.episode_number)) {
// is an episode range
if (parseInt(parseObj.episode_number[0]) === 1) {
// if it starts with #1 and overflows then it includes more than 1 season in a batch, cant fix this cleanly, name is parsed per file basis so this shouldnt be an issue
episode = `${parseObj.episode_number[0]} ~ ${parseObj.episode_number[1]}`
} else {
if (maxep && parseInt(parseObj.episode_number[1]) > maxep) {
// get root media to start at S1, instead of S2 or some OVA due to parsing errors
// this is most likely safe, if it was relative episodes then it would likely use an accurate title for the season
// if they didnt use an accurate title then its likely an absolute numbering scheme
// parent check is to break out of those incorrectly resolved OVA's
// if we used anime season to resolve anime name, then there's no need to march into prequel!
const prequel = !parseObj.anime_season && (findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && findEdge(media, 'PARENT')?.node))
const root = prequel && (await resolveSeason({ media: (await alRequest({ method: 'SearchIDSingle', id: prequel.id })).data.Media, force: true })).media
// if highest value is bigger than episode count or latest streamed episode +1 for safety, parseint to math.floor a number like 12.5 - specials - in 1 go
const result = await resolveSeason({ media: root || media, episode: parseObj.episode_number[1], increment: !parseObj.anime_season ? null : true })
media = result.rootMedia
// shift the range start by the same amount the range end moved
const diff = parseObj.episode_number[1] - result.episode
episode = `${parseObj.episode_number[0] - diff} ~ ${result.episode}`
failed = result.failed
} else {
// cant find ep count or range seems fine
episode = `${Number(parseObj.episode_number[0])} ~ ${Number(parseObj.episode_number[1])}`
}
}
} else {
if (maxep && parseInt(parseObj.episode_number) > maxep) {
// see big comment above
const prequel = !parseObj.anime_season && (findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && findEdge(media, 'PARENT')?.node))
const root = prequel && (await resolveSeason({ media: (await alRequest({ method: 'SearchIDSingle', id: prequel.id })).data.Media, force: true })).media
// value bigger than episode count
const result = await resolveSeason({ media: root || media, episode: parseInt(parseObj.episode_number), increment: !parseObj.anime_season ? null : true })
media = result.rootMedia
episode = result.episode
failed = result.failed
} else {
// cant find ep count or episode seems fine
episode = Number(parseObj.episode_number)
}
}
}
fileMedias.push({
episode: episode || parseObj.episode_number,
parseObject: parseObj,
media,
failed
})
}
return fileMedias
}
/**
 * Finds the first relation edge of the given type whose node format is accepted.
 * @param {object} media - AniList media with relations.edges
 * @param {string} type - relation type, e.g. 'PREQUEL', 'SEQUEL', 'PARENT'
 * @param {string[]} [formats] - accepted node formats
 * @param {boolean} [skip] - internal flag preventing a second fallback pass
 * @returns {object|undefined} the matching edge, if any
 */
export function findEdge (media, type, formats = ['TV', 'TV_SHORT'], skip) {
  const match = media.relations.edges.find(edge =>
    edge.relationType === type && formats.includes(edge.node.format)
  )
  if (match || skip || type !== 'SEQUEL') return match
  // this is hit-miss: retry SEQUEL lookups once with OVA allowed as well
  return findEdge(media, type, ['TV', 'TV_SHORT', 'OVA'], true)
}
// note: this doesnt cover anime which uses partially relative and partially absolute episode number, BUT IT COULD!
// Walks the prequel/sequel relation chain to translate an absolute episode number
// into a (media, relative episode) pair. `force` marches to the end of the chain
// without range checks; a result with failed: true means the episode couldn't be placed.
export async function resolveSeason (opts) {
// media, episode, increment, offset, force
if (!opts.media || !(opts.episode || opts.force)) throw new Error('No episode or media for season resolve!')
let { media, episode, increment, offset = 0, rootMedia = opts.media, force } = opts
const rootHighest = (rootMedia.nextAiringEpisode?.episode || rootMedia.episodes)
// pick a direction: walk towards a prequel unless incrementing, otherwise towards a sequel
const prequel = !increment && findEdge(media, 'PREQUEL')?.node
const sequel = !prequel && (increment || increment == null) && findEdge(media, 'SEQUEL')?.node
const edge = prequel || sequel
increment = increment ?? !prequel
if (!edge) {
// chain exhausted without placing the episode — warn unless this is a force walk
const obj = { media, episode: episode - offset, offset, increment, rootMedia, failed: true }
if (!force) {
console.warn('Error in parsing!', obj)
toast('Parsing Error', {
description: `Failed resolving anime episode!\n${media.title.userPreferred} - ${episode - offset}`
})
}
return obj
}
media = (await alRequest({ method: 'SearchIDSingle', id: edge.id })).data.Media
const highest = media.nextAiringEpisode?.episode || media.episodes
const diff = episode - (highest + offset)
// accumulate how many episodes the seasons walked over so far account for
offset += increment ? rootHighest : highest
if (increment) rootMedia = media
// force marches till end of tree, no need for checks
if (!force && diff <= rootHighest) {
episode -= offset
return { media, episode, offset, increment, rootMedia }
}
return resolveSeason({ media, episode, increment, offset, rootMedia, force })
}
// Cache of getRelationKey(parseObject) -> resolved AniList media, filled by resolveTitle().
const relations = {}
// AniList media format -> display label.
export const formatMap = {
TV: 'TV Series',
TV_SHORT: 'TV Short',
MOVIE: 'Movie',
SPECIAL: 'Special',
OVA: 'OVA',
ONA: 'ONA',
MUSIC: 'Music',
undefined: 'N/A',
null: 'N/A'
}
// AniList list status -> accent colour used across the UI.
export const statusColorMap = {
CURRENT: 'rgb(61,180,242)',
PLANNING: 'rgb(247,154,99)',
COMPLETED: 'rgb(123,213,85)',
PAUSED: 'rgb(250,122,122)',
REPEATING: '#3baeea',
DROPPED: 'rgb(232,93,117)'
}
/**
 * Starts playback for a media entry, resuming from the user's AniList progress.
 * A COMPLETED entry is reset to REPEATING at episode 0 before replaying from 1.
 */
export async function playMedia (media) {
  let episode = 1
  const entry = media.mediaListEntry
  if (entry?.progress) {
    if (entry.status === 'COMPLETED') {
      await setStatus('REPEATING', { episode: 0 }, media)
    } else {
      // Continue at the next unwatched episode, capped at the highest playable one.
      episode = Math.min(getMediaMaxEp(media, true), entry.progress + 1)
    }
  }
  playAnime(media, episode, true)
  media = null
}
/**
 * Saves a list-entry status mutation to AniList.
 * @param {string} status - new list status (e.g. 'REPEATING')
 * @param {object} [other] - extra Entry fields, spread last so they may override
 * @param {object} media - media whose list entry is updated
 * @returns {Promise} the pending AniList request
 */
export function setStatus (status, other = {}, media) {
  return alRequest({ method: 'Entry', id: media.id, status, ...other })
}
// Per-AniList-id cache of episode metadata responses.
const episodeMetadataMap = {}
/**
 * Fetches per-episode metadata for a media from api.ani.zip, memoised by id.
 * @param {object} media - AniList media; only media.id is read
 * @returns {Promise<object>} the `episodes` object from the mapping response
 */
export async function getEpisodeMetadataForMedia (media) {
  if (!episodeMetadataMap[media.id]) {
    const res = await fetch('https://api.ani.zip/mappings?anilist_id=' + media.id)
    const { episodes } = await res.json()
    episodeMetadataMap[media.id] = episodes
  }
  return episodeMetadataMap[media.id]
}
// Nyaa release ids curated by sneedex.moe, used to flag "best" releases.
let seadex = []
// Fetch lazily when the browser is idle; kept ascending so binarySearch works.
requestIdleCallback(async () => {
const res = await fetch('https://sneedex.moe/api/public/nyaa')
const json = await res.json()
seadex = json.flatMap(({ nyaaIDs }) => nyaaIDs).sort((a, b) => a - b) // sort for binary search
})
// Sets best = true on entries whose nyaa id appears in the seadex list
// (mutates and returns the entries). Entries with id '?' are left untouched.
export function mapBestRelease (entries) {
return entries.map(entry => {
if (entry.id) {
if (entry.id === '?') return entry
if (binarySearch(seadex, entry.id)) entry.best = true
return entry
}
// no id field: fall back to the first number found in the entry's link
const match = entry.link.match(/\d+/i)
if (match && binarySearch(seadex, Number(match[0]))) entry.best = true
return entry
})
}
import { DOMPARSER, PromiseBatch, binarySearch } from './util.js'
import { alRequest, alSearch } from './anilist.js'
import _anitomyscript from 'anitomyscript'
import { toast } from 'svelte-sonner'
import SectionsManager from './sections.js'
import { page } from '@/App.svelte'
import clipboard from './clipboard.js'
import { search, key } from '@/views/Search.svelte'
import { playAnime } from '../views/RSSView.svelte'
const imageRx = /\.(jpeg|jpg|gif|png|webp)/i
clipboard.on('files', ({ detail }) => {
for (const file of detail) {
if (file.type.startsWith('image')) {
toast.promise(traceAnime(file), {
description: 'You can also paste an URL to an image.',
loading: 'Looking up anime for image...',
success: 'Found anime for image!',
error: 'Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.'
})
}
}
})
clipboard.on('text', ({ detail }) => {
for (const { type, text } of detail) {
let src = null
if (type === 'text/html') {
src = DOMPARSER(text, 'text/html').querySelectorAll('img')[0]?.src
} else if (imageRx.exec(text)) {
src = text
}
if (src) {
toast.promise(traceAnime(src), {
description: 'You can also paste an URL to an image.',
loading: 'Looking up anime for image...',
success: 'Found anime for image!',
error: 'Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.'
})
}
}
})
export async function traceAnime (image) { // WAIT lookup logic
let options
let url = `https://api.trace.moe/search?cutBorders&url=${image}`
if (image instanceof Blob) {
options = {
method: 'POST',
body: image,
headers: { 'Content-type': image.type }
}
url = 'https://api.trace.moe/search'
}
const res = await fetch(url, options)
const { result } = await res.json()
if (result?.length) {
const ids = result.map(({ anilist }) => anilist)
search.value = {
clearNext: true,
load: (page = 1, perPage = 50, variables = {}) => {
const res = alRequest({ method: 'SearchIDS', page, perPage, id: ids, ...SectionsManager.sanitiseObject(variables) }).then(res => {
for (const index in res.data?.Page?.media) {
const media = res.data.Page.media[index]
const counterpart = result.find(({ anilist }) => anilist === media.id)
res.data.Page.media[index] = {
media,
episode: counterpart.episode,
similarity: counterpart.similarity,
episodeData: {
image: counterpart.image,
video: counterpart.video
}
}
}
res.data?.Page?.media.sort((a, b) => b.similarity - a.similarity)
return res
})
return SectionsManager.wrapResponse(res, result.length, 'episode')
}
}
key.value = {}
page.value = 'search'
} else {
throw new Error('Search Failed \n Couldn\'t find anime for specified image! Try to remove black bars, or use a more detailed image.')
}
}
function constructChapters (results, duration) {
const chapters = results.map(result => {
const diff = duration - result.episodeLength
return {
start: (result.interval.startTime + diff) * 1000,
end: (result.interval.endTime + diff) * 1000,
text: result.skipType.toUpperCase()
}
})
const ed = chapters.find(({ text }) => text === 'ED')
const recap = chapters.find(({ text }) => text === 'RECAP')
if (recap) recap.text = 'Recap'
chapters.sort((a, b) => a - b)
if ((chapters[0].start | 0) !== 0) {
chapters.unshift({ start: 0, end: chapters[0].start, text: chapters[0].text === 'OP' ? 'Intro' : 'Episode' })
}
if (ed) {
if ((ed.end | 0) + 5000 - duration * 1000 < 0) {
chapters.push({ start: ed.end, end: duration * 1000, text: 'Preview' })
}
} else if ((chapters[chapters.length - 1].end | 0) + 5000 - duration * 1000 < 0) {
chapters.push({
start: chapters[chapters.length - 1].end,
end: duration * 1000,
text: 'Episode'
})
}
for (let i = 0, len = chapters.length - 2; i <= len; ++i) {
const current = chapters[i]
const next = chapters[i + 1]
if ((current.end | 0) !== (next.start | 0)) {
chapters.push({
start: current.end,
end: next.start,
text: 'Episode'
})
}
}
chapters.sort((a, b) => a.start - b.start)
return chapters
}
export async function getChaptersAniSkip (file, duration) {
const resAccurate = await fetch(`https://api.aniskip.com/v2/skip-times/${file.media.media.idMal}/${file.media.episode}/?episodeLength=${duration}&types=op&types=ed&types=recap`)
const jsonAccurate = await resAccurate.json()
const resRough = await fetch(`https://api.aniskip.com/v2/skip-times/${file.media.media.idMal}/${file.media.episode}/?episodeLength=0&types=op&types=ed&types=recap`)
const jsonRough = await resRough.json()
const map = {}
for (const result of [...jsonAccurate.results, ...jsonRough.results]) {
map[result.skipType] ||= result
}
const results = Object.values(map)
if (!results.length) return []
return constructChapters(results, duration)
}
export function getMediaMaxEp (media, playable) {
if (playable) {
return media.nextAiringEpisode?.episode - 1 || media.airingSchedule?.nodes?.[0]?.episode - 1 || media.episodes
} else {
return media.episodes || media.nextAiringEpisode?.episode - 1 || media.airingSchedule?.nodes?.[0]?.episode - 1
}
}
// resolve anime name based on file name and store it
const postfix = {
1: 'st',
2: 'nd',
3: 'rd'
}
async function resolveTitle (parseObject) {
const name = parseObject.anime_title
const method = { name, method: 'SearchName', perPage: 10, status: ['RELEASING', 'FINISHED'], sort: 'SEARCH_MATCH' }
if (parseObject.anime_year) method.year = parseObject.anime_year
// inefficient but readable
let media = null
try {
// change S2 into Season 2 or 2nd Season
const match = method.name.match(/ S(\d+)/)
const oldname = method.name
if (match) {
if (Number(match[1]) === 1) { // if this is S1, remove the " S1" or " S01"
method.name = method.name.replace(/ S(\d+)/, '')
media = (await alSearch(method)).data.Page.media[0]
} else {
method.name = method.name.replace(/ S(\d+)/, ` ${Number(match[1])}${postfix[Number(match[1])] || 'th'} Season`)
media = (await alSearch(method)).data.Page.media[0]
if (!media) {
method.name = oldname.replace(/ S(\d+)/, ` Season ${Number(match[1])}`)
media = (await alSearch(method)).data.Page.media[0]
}
}
} else {
media = (await alSearch(method)).data.Page.media[0]
}
// remove - :
if (!media) {
const match = method.name.match(/[-:]/g)
if (match) {
method.name = method.name.replace(/[-:]/g, '')
media = (await alSearch(method)).data.Page.media[0]
}
}
// remove (TV)
if (!media) {
const match = method.name.match(/\(TV\)/)
if (match) {
method.name = method.name.replace('(TV)', '')
media = (await alSearch(method)).data.Page.media[0]
}
}
// check adult
if (!media) {
method.isAdult = true
media = (await alSearch(method)).data.Page.media[0]
}
} catch (e) { }
if (media) relations[getRelationKey(parseObject)] = media
}
// utility method for correcting anitomyscript woes for what's needed
export async function anitomyscript (...args) {
// @ts-ignore
const res = await _anitomyscript(...args)
const parseObjs = Array.isArray(res) ? res : [res]
for (const obj of parseObjs) {
const seasonMatch = obj.anime_title.match(/S(\d{2})E(\d{2})/)
if (seasonMatch) {
obj.anime_season = seasonMatch[1]
obj.episode_number = seasonMatch[2]
obj.anime_title = obj.anime_title.replace(/S(\d{2})E(\d{2})/, '')
}
const yearMatch = obj.anime_title.match(/ (19[5-9]\d|20\d{2})/)
if (yearMatch && Number(yearMatch[1]) <= (new Date().getUTCFullYear() + 1)) {
obj.anime_year = yearMatch[1]
obj.anime_title = obj.anime_title.replace(/ (19[5-9]\d|20\d{2})/, '')
}
if (Number(obj.anime_season) > 1) obj.anime_title += ' S' + obj.anime_season
}
return parseObjs
}
function getRelationKey (obj) {
let key = obj.anime_title
if (obj.anime_year) key += obj.anime_year
return key
}
// TODO: anidb aka true episodes need to be mapped to anilist episodes a bit better
// Resolves one or more torrent/file names to AniList media entries plus a
// normalised episode number or range. Flow: parse names, resolve each unique
// title to media (results land in the module-level `relations` cache via
// resolveTitle), then fix up episode numbers, walking prequel/root seasons
// when the file uses absolute numbering that overflows the matched season.
export async function resolveFileMedia (fileName) {
  const parseObjs = await anitomyscript(fileName)
  // batches promises in 10 at a time, because of CF burst protection, which still sometimes gets triggered :/
  // De-duplicate titles that are not already in the `relations` cache so each
  // unique title is resolved at most once.
  const uniq = {}
  for (const obj of parseObjs) {
    const key = getRelationKey(obj)
    if (key in relations) continue
    uniq[key] = obj
  }
  await PromiseBatch(resolveTitle, Object.values(uniq), 10)
  const fileMedias = []
  for (const parseObj of parseObjs) {
    let failed = false
    let episode
    let media = relations[getRelationKey(parseObj)]
    // resolve episode, if movie, dont.
    // maxep: latest airing episode if still releasing, else total episode count.
    const maxep = media?.nextAiringEpisode?.episode || media?.episodes
    if ((media?.format !== 'MOVIE' || maxep) && parseObj.episode_number) {
      if (Array.isArray(parseObj.episode_number)) {
        // is an episode range
        if (parseInt(parseObj.episode_number[0]) === 1) {
          // if it starts with #1 and overflows then it includes more than 1 season in a batch, cant fix this cleanly, name is parsed per file basis so this shouldnt be an issue
          episode = `${parseObj.episode_number[0]} ~ ${parseObj.episode_number[1]}`
        } else {
          if (maxep && parseInt(parseObj.episode_number[1]) > maxep) {
            // get root media to start at S1, instead of S2 or some OVA due to parsing errors
            // this is most likely safe, if it was relative episodes then it would likely use an accurate title for the season
            // if they didnt use an accurate title then its likely an absolute numbering scheme
            // parent check is to break out of those incorrectly resolved OVA's
            // if we used anime season to resolve anime name, then there's no need to march into prequel!
            const prequel = !parseObj.anime_season && (findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && findEdge(media, 'PARENT')?.node))
            const root = prequel && (await resolveSeason({ media: (await alRequest({ method: 'SearchIDSingle', id: prequel.id })).data.Media, force: true })).media
            // if highest value is bigger than episode count or latest streamed episode +1 for safety, parseint to math.floor a number like 12.5 - specials - in 1 go
            const result = await resolveSeason({ media: root || media, episode: parseObj.episode_number[1], increment: !parseObj.anime_season ? null : true })
            media = result.rootMedia
            // Shift the range start by the same offset the range end was shifted.
            const diff = parseObj.episode_number[1] - result.episode
            episode = `${parseObj.episode_number[0] - diff} ~ ${result.episode}`
            failed = result.failed
          } else {
            // cant find ep count or range seems fine
            episode = `${Number(parseObj.episode_number[0])} ~ ${Number(parseObj.episode_number[1])}`
          }
        }
      } else {
        if (maxep && parseInt(parseObj.episode_number) > maxep) {
          // see big comment above
          const prequel = !parseObj.anime_season && (findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && findEdge(media, 'PARENT')?.node))
          const root = prequel && (await resolveSeason({ media: (await alRequest({ method: 'SearchIDSingle', id: prequel.id })).data.Media, force: true })).media
          // value bigger than episode count
          const result = await resolveSeason({ media: root || media, episode: parseInt(parseObj.episode_number), increment: !parseObj.anime_season ? null : true })
          media = result.rootMedia
          episode = result.episode
          failed = result.failed
        } else {
          // cant find ep count or episode seems fine
          episode = Number(parseObj.episode_number)
        }
      }
    }
    fileMedias.push({
      episode: episode || parseObj.episode_number,
      parseObject: parseObj,
      media,
      failed
    })
  }
  return fileMedias
}
/**
 * Finds a related-media edge of the given relation type whose node matches one
 * of the accepted formats.
 * @param {object} media - AniList media object with relations.edges populated.
 * @param {string} type - Relation type, e.g. 'PREQUEL', 'SEQUEL', 'PARENT'.
 * @param {string[]} [formats] - Accepted node formats.
 * @param {boolean} [skip] - Internal flag preventing a second fallback pass.
 * @returns {object|undefined} The matching edge, or undefined.
 */
export function findEdge (media, type, formats = ['TV', 'TV_SHORT'], skip) {
  let res = media.relations.edges.find(edge =>
    edge.relationType === type && formats.includes(edge.node.format)
  )
  // this is hit-miss: sequels are sometimes released as OVAs, so retry the
  // SEQUEL lookup once with OVA allowed. (The original passed the wider list
  // via an assignment expression `formats = [...]` inside the call, which only
  // obscured intent — passing it as a plain argument is equivalent.)
  if (!res && !skip && type === 'SEQUEL') res = findEdge(media, type, ['TV', 'TV_SHORT', 'OVA'], true)
  return res
}
// note: this doesnt cover anime which uses partially relative and partially absolute episode number, BUT IT COULD!
// Walks the PREQUEL/SEQUEL relation chain to map an (absolute) episode number
// onto the correct season's media. Recurses, accumulating `offset`, until the
// episode fits inside the current root's episode count. With `force` set it
// marches to the end of the chain regardless (used to find the root season).
// Returns { media, episode, offset, increment, rootMedia, failed? }.
export async function resolveSeason (opts) {
  // media, episode, increment, offset, force
  if (!opts.media || !(opts.episode || opts.force)) throw new Error('No episode or media for season resolve!')
  let { media, episode, increment, offset = 0, rootMedia = opts.media, force } = opts
  // Highest known episode of the current root: next airing if releasing, else total.
  const rootHighest = (rootMedia.nextAiringEpisode?.episode || rootMedia.episodes)
  // Direction of the walk: prequels (backwards) unless increment forces forwards.
  const prequel = !increment && findEdge(media, 'PREQUEL')?.node
  const sequel = !prequel && (increment || increment == null) && findEdge(media, 'SEQUEL')?.node
  const edge = prequel || sequel
  increment = increment ?? !prequel
  if (!edge) {
    // Ran out of relations before the episode fit anywhere — report failure,
    // silently when `force` (caller only wants the chain's end).
    const obj = { media, episode: episode - offset, offset, increment, rootMedia, failed: true }
    if (!force) {
      console.warn('Error in parsing!', obj)
      toast('Parsing Error', {
        description: `Failed resolving anime episode!\n${media.title.userPreferred} - ${episode - offset}`
      })
    }
    return obj
  }
  media = (await alRequest({ method: 'SearchIDSingle', id: edge.id })).data.Media
  const highest = media.nextAiringEpisode?.episode || media.episodes
  const diff = episode - (highest + offset)
  offset += increment ? rootHighest : highest
  if (increment) rootMedia = media
  // force marches till end of tree, no need for checks
  if (!force && diff <= rootHighest) {
    episode -= offset
    return { media, episode, offset, increment, rootMedia }
  }
  return resolveSeason({ media, episode, increment, offset, rootMedia, force })
}
// Cache of resolved title→AniList-media lookups, keyed by getRelationKey().
// Populated by resolveTitle (called from resolveFileMedia).
const relations = {}
// Human-readable labels for AniList media formats; undefined/null map to 'N/A'.
export const formatMap = {
  TV: 'TV Series',
  TV_SHORT: 'TV Short',
  MOVIE: 'Movie',
  SPECIAL: 'Special',
  OVA: 'OVA',
  ONA: 'ONA',
  MUSIC: 'Music',
  undefined: 'N/A',
  null: 'N/A'
}
// Badge colors for AniList list statuses.
export const statusColorMap = {
  CURRENT: 'rgb(61,180,242)',
  PLANNING: 'rgb(247,154,99)',
  COMPLETED: 'rgb(123,213,85)',
  PAUSED: 'rgb(250,122,122)',
  REPEATING: '#3baeea',
  DROPPED: 'rgb(232,93,117)'
}
/**
 * Starts playback for a media entry, resuming from the user's AniList list
 * progress when available.
 * Resumes at progress + 1 (clamped to the last available episode); a
 * COMPLETED entry is switched to REPEATING at episode 0 first and playback
 * restarts from episode 1.
 * @param {object} media - AniList media object, optionally with mediaListEntry.
 */
export async function playMedia (media) {
  let ep = 1
  if (media.mediaListEntry) {
    const { status, progress } = media.mediaListEntry
    if (progress) {
      if (status === 'COMPLETED') {
        await setStatus('REPEATING', { episode: 0 }, media)
      } else {
        ep = Math.min(getMediaMaxEp(media, true), progress + 1)
      }
    }
  }
  playAnime(media, ep, true)
  // NOTE: the original ended with `media = null`; reassigning a parameter has
  // no effect on the caller's reference and was dead code, so it was removed.
}
/**
 * Fires an AniList list-entry mutation for the given media.
 * Extra mutation fields (e.g. progress, episode) can be passed via `other`.
 * @returns {Promise} The alRequest promise for the 'Entry' mutation.
 */
export function setStatus (status, other = {}, media) {
  return alRequest({ method: 'Entry', id: media.id, status, ...other })
}
// Per-media cache of ani.zip episode metadata. Stores the in-flight promise
// (not the resolved value) so concurrent calls for the same media id trigger
// only one network request; the original populated the cache only after the
// fetch resolved, so overlapping calls each fetched separately.
const episodeMetadataMap = {}
/**
 * Fetches (and caches) per-episode metadata for a media from api.ani.zip.
 * @param {object} media - AniList media object; only media.id is used.
 * @returns {Promise<object>} Map of episode metadata keyed by episode number.
 */
export async function getEpisodeMetadataForMedia (media) {
  if (!episodeMetadataMap[media.id]) {
    episodeMetadataMap[media.id] = (async () => {
      const res = await fetch('https://api.ani.zip/mappings?anilist_id=' + media.id)
      const { episodes } = await res.json()
      return episodes
    })().catch(err => {
      // Evict failed lookups so a later call can retry instead of rethrowing
      // a permanently cached rejection.
      delete episodeMetadataMap[media.id]
      throw err
    })
  }
  return episodeMetadataMap[media.id]
}
// Sorted list of nyaa torrent ids that sneedex/seadex marks as "best release".
// Empty until the idle-time fetch below completes.
let seadex = []
// Load the list lazily when the main thread is idle.
// NOTE(review): a rejected fetch here is unhandled (unhandled rejection inside
// the idle callback) and simply leaves `seadex` empty — confirm this is intended.
requestIdleCallback(async () => {
  const res = await fetch('https://sneedex.moe/api/public/nyaa')
  const json = await res.json()
  seadex = json.flatMap(({ nyaaIDs }) => nyaaIDs).sort((a, b) => a - b) // sort for binary search
})
/**
 * Flags torrent entries that appear in the seadex "best release" id list by
 * setting `entry.best = true` (entries are mutated in place).
 * Entries without an id fall back to extracting the first digit run from
 * their link. Returns a new array of the same entry objects.
 */
export function mapBestRelease (entries) {
  return entries.map(release => {
    if (release.id) {
      // '?' marks an unknown id — leave it untouched.
      if (release.id !== '?' && binarySearch(seadex, release.id)) release.best = true
      return release
    }
    const digits = release.link.match(/\d+/i)
    if (digits && binarySearch(seadex, Number(digits[0]))) release.best = true
    return release
  })
}

23
common/package.json Normal file
View file

@ -0,0 +1,23 @@
{
"name": "common",
"private": true,
"dependencies": {
"@fontsource-variable/material-symbols-outlined": "latest",
"@fontsource-variable/nunito": "latest",
"@fontsource/roboto": "latest",
"anitomyscript": "github:ThaUnknown/anitomyscript#42290c4b3f256893be08a4e89051f448ff5e9d00",
"bottleneck": "^2.19.5",
"browser-event-target-emitter": "^1.0.1",
"jassub": "latest",
"js-levenshtein": "^1.1.6",
"p2pt": "github:ThaUnknown/p2pt#modernise",
"perfect-seekbar": "^1.1.0",
"quartermoon": "^1.2.3",
"simple-store-svelte": "^1.0.1",
"svelte": "^4.2.3",
"svelte-keybinds": "^1.0.5",
"svelte-loader": "^3.1.9",
"svelte-miniplayer": "^1.0.3",
"svelte-sonner": "^0.3.3"
}
}

View file

Before

Width:  |  Height:  |  Size: 106 KiB

After

Width:  |  Height:  |  Size: 106 KiB

View file

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 16 KiB

15
common/tsconfig.json Normal file
View file

@ -0,0 +1,15 @@
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"@/*": ["./*"],
},
"checkJs": true,
"target": "ESNext",
"moduleResolution": "node",
"module": "ESNext",
"types": ["./types.d.ts"],
"allowSyntheticDefaultImports": true
},
"exclude": ["node_modules/**", "**/node_modules", "dist", "build"]
}

View file

View file

@ -1,37 +1,37 @@
<script>
import { settings } from '@/modules/settings.js'
import { click } from '@/modules/click.js'
let block = false
async function testConnection () {
try {
for (let i = 0; i < 2; ++i) {
// fetch twice, sometimes it will go tru once if ISP is shitty
await fetch($settings.toshoURL + 'json?show=torrent&id=1')
}
block = false
} catch (e) {
console.error(e)
block = true
}
}
testConnection()
</script>
{#if block}
<div class='w-full h-full left-0 z-50 position-absolute content-wrapper bg-dark d-flex align-items-center justify-content-center flex-column'>
<div>
<h1 class='font-weight-bold'>Could not connect to Tosho!</h1>
<div class='font-size-16'>This happens either because Tosho is down, or because your ISP blocks Tosho, the latter being more likely.</div>
<div class='font-size-16'>Most features of Miru will not function correctly without being able to connect to Tosho.</div>
<div class='font-size-16'>If you enable a VPN a restart might be required for it to take effect.</div>
<!-- eslint-disable-next-line svelte/valid-compile -->
<div class='font-size-16'>Visit <a class='text-primary pointer' use:click={() => { window.IPC.emit('open', 'https://thewiki.moe/tutorials/unblock/') }}>this guide</a> for a tutorial on how to bypass ISP blocks.</div>
<div class='d-flex w-full mt-20 pt-20'>
<button class='btn ml-auto mr-5' type='button' on:click={() => { block = false }}>I Understand</button>
<button class='btn btn-primary mr-5' type='button' on:click={() => { window.IPC.emit('open', 'https://thewiki.moe/tutorials/unblock/') }}>Open Guide</button>
<button class='btn btn-primary' type='button' on:click={testConnection}>Reconnect</button>
</div>
</div>
</div>
{/if}
<script>
import { settings } from '@/modules/settings.js'
import { click } from '@/modules/click.js'
let block = false
async function testConnection () {
try {
for (let i = 0; i < 2; ++i) {
// fetch twice, sometimes it will go tru once if ISP is shitty
await fetch($settings.toshoURL + 'json?show=torrent&id=1')
}
block = false
} catch (e) {
console.error(e)
block = true
}
}
testConnection()
</script>
{#if block}
<div class='w-full h-full left-0 z-50 position-absolute content-wrapper bg-dark d-flex align-items-center justify-content-center flex-column'>
<div>
<h1 class='font-weight-bold'>Could not connect to Tosho!</h1>
<div class='font-size-16'>This happens either because Tosho is down, or because your ISP blocks Tosho, the latter being more likely.</div>
<div class='font-size-16'>Most features of Miru will not function correctly without being able to connect to Tosho.</div>
<div class='font-size-16'>If you enable a VPN a restart might be required for it to take effect.</div>
<!-- eslint-disable-next-line svelte/valid-compile -->
<div class='font-size-16'>Visit <a class='text-primary pointer' use:click={() => { window.IPC.emit('open', 'https://thewiki.moe/tutorials/unblock/') }}>this guide</a> for a tutorial on how to bypass ISP blocks.</div>
<div class='d-flex w-full mt-20 pt-20'>
<button class='btn ml-auto mr-5' type='button' on:click={() => { block = false }}>I Understand</button>
<button class='btn btn-primary mr-5' type='button' on:click={() => { window.IPC.emit('open', 'https://thewiki.moe/tutorials/unblock/') }}>Open Guide</button>
<button class='btn btn-primary' type='button' on:click={testConnection}>Reconnect</button>
</div>
</div>
</div>
{/if}

View file

@ -1,75 +1,75 @@
<script context='module'>
import { writable } from 'simple-store-svelte'
import SectionsManager from '@/modules/sections.js'
export const search = writable({})
const items = writable([])
export const key = writable({})
</script>
<script>
import Search, { searchCleanup } from '../components/Search.svelte'
import Card from '../components/cards/Card.svelte'
import { hasNextPage } from '@/modules/sections.js'
import smoothScroll from '@/modules/scroll.js'
import { debounce } from '@/modules/util.js'
import { onDestroy, onMount } from 'svelte'
let page = 0
items.value = []
let container = null
function loadSearchData () {
const load = $search.load || SectionsManager.createFallbackLoad()
const nextData = load(++page, undefined, searchCleanup($search))
$items = [...$items, ...nextData]
return nextData[nextData.length - 1].data
}
const update = debounce(() => {
$key = {}
}, 300)
$: loadTillFull($key)
let canScroll = true
async function loadTillFull (_key) {
if (!container) return
const cachedKey = $key
canScroll = false
page = 0
items.value = []
hasNextPage.value = true
await loadSearchData()
// eslint-disable-next-line no-unmodified-loop-condition
while (hasNextPage.value && container && cachedKey === $key && container.scrollHeight <= container.clientHeight) {
canScroll = false
await loadSearchData()
}
canScroll = true
}
async function infiniteScroll () {
if (canScroll && $hasNextPage && this.scrollTop + this.clientHeight > this.scrollHeight - 800) {
canScroll = false
await loadSearchData()
canScroll = true
}
}
onDestroy(() => {
if ($search.clearNext || $search.disableSearch) $search = {}
})
onMount(loadTillFull)
</script>
<div class='bg-dark h-full w-full overflow-y-scroll d-flex flex-wrap flex-row root overflow-x-hidden justify-content-center align-content-start' use:smoothScroll bind:this={container} on:scroll={infiniteScroll}>
<Search bind:search={$search} on:input={update} />
<div class='h-full w-full d-flex flex-wrap flex-row px-50 justify-content-center align-content-start'>
{#key $key}
{#each $items as card}
<Card {card} />
{/each}
{/key}
</div>
</div>
<script context='module'>
import { writable } from 'simple-store-svelte'
import SectionsManager from '@/modules/sections.js'
export const search = writable({})
const items = writable([])
export const key = writable({})
</script>
<script>
import Search, { searchCleanup } from '../components/Search.svelte'
import Card from '../components/cards/Card.svelte'
import { hasNextPage } from '@/modules/sections.js'
import smoothScroll from '@/modules/scroll.js'
import { debounce } from '@/modules/util.js'
import { onDestroy, onMount } from 'svelte'
let page = 0
items.value = []
let container = null
function loadSearchData () {
const load = $search.load || SectionsManager.createFallbackLoad()
const nextData = load(++page, undefined, searchCleanup($search))
$items = [...$items, ...nextData]
return nextData[nextData.length - 1].data
}
const update = debounce(() => {
$key = {}
}, 300)
$: loadTillFull($key)
let canScroll = true
async function loadTillFull (_key) {
if (!container) return
const cachedKey = $key
canScroll = false
page = 0
items.value = []
hasNextPage.value = true
await loadSearchData()
// eslint-disable-next-line no-unmodified-loop-condition
while (hasNextPage.value && container && cachedKey === $key && container.scrollHeight <= container.clientHeight) {
canScroll = false
await loadSearchData()
}
canScroll = true
}
async function infiniteScroll () {
if (canScroll && $hasNextPage && this.scrollTop + this.clientHeight > this.scrollHeight - 800) {
canScroll = false
await loadSearchData()
canScroll = true
}
}
onDestroy(() => {
if ($search.clearNext || $search.disableSearch) $search = {}
})
onMount(loadTillFull)
</script>
<div class='bg-dark h-full w-full overflow-y-scroll d-flex flex-wrap flex-row root overflow-x-hidden justify-content-center align-content-start' use:smoothScroll bind:this={container} on:scroll={infiniteScroll}>
<Search bind:search={$search} on:input={update} />
<div class='h-full w-full d-flex flex-wrap flex-row px-50 justify-content-center align-content-start'>
{#key $key}
{#each $items as card}
<Card {card} />
{/each}
{/key}
</div>
</div>

98
common/webpack.config.cjs Normal file
View file

@ -0,0 +1,98 @@
const { join, resolve } = require('path')
const mode = process.env.NODE_ENV?.trim() || 'development'
const isDev = mode === 'development'
const HtmlWebpackPlugin = require('html-webpack-plugin')
const MiniCssExtractPlugin = require('mini-css-extract-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
module.exports = (parentDir, alias = {}) => ({
devtool: 'source-map',
entry: join(__dirname, 'main.js'),
output: {
path: join(parentDir, 'build'),
filename: 'renderer.js'
},
mode,
module: {
rules: [
{
test: /\.svelte$/,
use: {
loader: 'svelte-loader',
options: {
compilerOptions: {
dev: isDev
},
emitCss: !isDev,
hotReload: isDev
}
}
},
{
test: /\.css$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: 'css-loader',
options: {
sourceMap: true
}
}
]
},
{
// required to prevent errors from Svelte on Webpack 5+
test: /node_modules\/svelte\/.*\.mjs$/,
resolve: {
fullySpecified: false
}
}
]
},
resolve: {
aliasFields: ['browser'],
alias: {
...alias,
'@': __dirname,
module: false,
url: false,
'bittorrent-tracker/lib/client/websocket-tracker.js': resolve('../node_modules/bittorrent-tracker/lib/client/websocket-tracker.js')
},
extensions: ['.mjs', '.js', '.svelte']
},
plugins: [
new MiniCssExtractPlugin({
filename: '[name].css'
}),
new CopyWebpackPlugin({
patterns: [
{ from: join(__dirname, 'public') }
]
}),
new HtmlWebpackPlugin({
filename: 'app.html',
inject: false,
templateContent: ({ htmlWebpackPlugin }) => /* html */`
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset='utf-8'>
<meta name='viewport' content='width=device-width,initial-scale=1'>
<meta name="theme-color" content="#191c20">
<title>Miru</title>
<!-- <link rel="preconnect" href="https://www.youtube-nocookie.com"> -->
<link rel="preconnect" href="https://graphql.anilist.co">
<link rel='icon' href='/logo.ico'>
${htmlWebpackPlugin.tags.headTags}
</head>
<body class="dark-mode with-custom-webkit-scrollbars with-custom-css-scrollbars">
${htmlWebpackPlugin.tags.bodyTags}
</body>
</html> `
})],
target: 'web'
})

View file

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

112
electron/package.json Normal file
View file

@ -0,0 +1,112 @@
{
"name": "Miru",
"version": "4.4.18",
"private": true,
"author": "ThaUnknown_ <ThaUnknown@users.noreply.github.com>",
"description": "Stream anime torrents, real-time with no waiting for downloads.",
"main": "build/main.js",
"homepage": "https://github.com/ThaUnknown/miru#readme",
"scripts": {
"start": "cross-env NODE_ENV=development concurrently --kill-others \"npm run web:watch\" \"npm run electron:start\"",
"web:watch": "webpack serve",
"web:build": "cross-env NODE_ENV=production webpack build",
"electron:start": "electron ./build/main.js",
"build": "npm run web:build && electron-builder",
"publish": "npm run web:build && electron-builder -p always"
},
"devDependencies": {
"@electron/notarize": "^2.1.0",
"common": "workspace:*",
"discord-rpc": "4.0.1",
"electron": "25.1.0",
"electron-builder": "^24.6.4",
"electron-log": "^4.4.8",
"electron-updater": "^6.1.4"
},
"dependencies": {
"utp-native": "^2.5.3"
},
"standard": {
"ignore": [
"bundle.js",
"bundle.map.js"
],
"env": [
"browser",
"node"
]
},
"build": {
"directories": {
"buildResources": "buildResources"
},
"asarUnpack": "**/*.node",
"electronDownload": {
"mirror": "https://github.com/aa910d571134/feb7c2e1a10f/releases/download/",
"version": "25.1.0",
"customDir": "2ffc48f0b43f"
},
"protocols": {
"name": "miru",
"schemes": [
"miru"
]
},
"publish": [
{
"provider": "github",
"owner": "ThaUnknown",
"repo": "miru"
}
],
"afterSign": "./buildResources/notarize.js",
"appId": "com.github.thaunknown.miru",
"productName": "Miru",
"files": [
"build/**/*",
"!node_modules/**/*.{mk,a,o,h}"
],
"mac": {
"artifactName": "${os}-${name}-${version}.${ext}",
"singleArchFiles": "node_modules/+(register-scheme|utp-native|fs-native-extensions)/**",
"category": "public.app-category.video",
"icon": "buildResources/icon.icns",
"target": [
{
"arch": "universal",
"target": "dmg"
}
]
},
"win": {
"artifactName": "${os}-${name}-${version}.${ext}",
"target": "nsis"
},
"linux": {
"artifactName": "${os}-${name}-${version}.${ext}",
"category": "AudioVideo;Video",
"description": "Bittorrent streaming software for cats",
"desktop": {
"Name": "Miru",
"Comment": "Bittorrent streaming software for cats",
"Keywords": "anime",
"Type": "Application",
"MimeType": "x-scheme-handler/miru;"
},
"target": [
{
"arch": "x64",
"target": "AppImage"
},
{
"arch": "x64",
"target": "deb"
}
]
},
"nsis": {
"allowToChangeInstallationDirectory": true,
"oneClick": false
}
}
}

View file

@ -3,7 +3,7 @@ import { ipcRenderer } from 'electron'
import HTTPTracker from 'bittorrent-tracker/lib/client/http-tracker.js'
import { hex2bin, arr2hex, text2arr } from 'uint8-util'
import Parser from './parser.js'
import { defaults, fontRx, subRx, videoRx } from '../common/util.js'
import { defaults, fontRx, subRx, videoRx } from 'common/util.js'
import { statfs } from 'fs/promises'
const LARGE_FILESIZE = 32_000_000_000

View file

@ -1,4 +1,4 @@
import { fontRx } from '../common/util.js'
import { fontRx } from 'common/util.js'
import Metadata from 'matroska-metadata'
export default class Parser {

View file

@ -0,0 +1,70 @@
const { join, resolve } = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const mode = process.env.NODE_ENV?.trim() || 'development'
const commonConfig = require('common/webpack.config.cjs')
module.exports = [
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'background', 'background.js'),
output: {
path: join(__dirname, 'build'),
filename: 'background.js'
},
mode,
externals: {
'utp-native': 'require("utp-native")'
},
resolve: {
aliasFields: [],
mainFields: ['module', 'main', 'node'],
alias: {
'node-fetch': false,
ws: false,
wrtc: false,
'bittorrent-tracker/lib/client/http-tracker.js': resolve('../node_modules/bittorrent-tracker/lib/client/http-tracker.js')
}
},
plugins: [new HtmlWebpackPlugin({ filename: 'background.html' })],
target: 'electron20.0-renderer',
devServer: {
devMiddleware: {
writeToDisk: true
},
hot: true,
client: {
overlay: { errors: true, warnings: false, runtimeErrors: false }
},
port: 5000
}
},
commonConfig(__dirname),
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'preload', 'preload.js'),
output: {
path: join(__dirname, 'build'),
filename: 'preload.js'
},
resolve: {
aliasFields: []
},
mode,
target: 'electron20.0-preload'
},
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'main', 'main.js'),
output: {
path: join(__dirname, 'build'),
filename: 'main.js'
},
resolve: {
aliasFields: []
},
mode,
target: 'electron20.0-main'
}
]

View file

@ -1,14 +1,9 @@
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"@/*": ["src/renderer/*"],
},
"checkJs": true,
"target": "ESNext",
"moduleResolution": "node",
"module": "ESNext",
"types": ["./types.d.ts"],
"allowSyntheticDefaultImports": true
},
"exclude": ["node_modules/**", "**/node_modules", "dist", "build"]

View file

@ -1,62 +1,10 @@
{
"name": "Miru",
"version": "4.4.18",
"private": true,
"author": "ThaUnknown_ <ThaUnknown@users.noreply.github.com>",
"description": "Stream anime torrents, real-time with no waiting for downloads.",
"main": "build/main.js",
"homepage": "https://github.com/ThaUnknown/miru#readme",
"scripts": {
"start": "cross-env NODE_ENV=development concurrently --kill-others \"npm run web:watch\" \"npm run electron:start\"",
"web:watch": "webpack serve",
"web:build": "cross-env NODE_ENV=production webpack build",
"electron:start": "electron ./build/main.js",
"build": "npm run web:build && electron-builder",
"publish": "npm run web:build && electron-builder -p always"
},
"devDependencies": {
"@electron/notarize": "^2.1.0",
"@fontsource-variable/material-symbols-outlined": "^5.0.15",
"@fontsource-variable/nunito": "^5.0.15",
"@fontsource/roboto": "^5.0.8",
"@typescript-eslint/parser": "^6.8.0",
"anitomyscript": "github:ThaUnknown/anitomyscript#42290c4b3f256893be08a4e89051f448ff5e9d00",
"bottleneck": "^2.19.5",
"browser-event-target-emitter": "^1.0.1",
"concurrently": "^8.2.2",
"copy-webpack-plugin": "^11.0.0",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
"discord-rpc": "4.0.1",
"electron": "25.1.0",
"electron-builder": "^24.6.4",
"electron-log": "^4.4.8",
"electron-updater": "^6.1.4",
"eslint": "^8.52.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-svelte": "^2.34.0",
"html-webpack-plugin": "^5.5.3",
"jassub": "1.7.9",
"js-levenshtein": "^1.1.6",
"matroska-metadata": "^1.0.3",
"mini-css-extract-plugin": "^2.7.6",
"p2pt": "github:ThaUnknown/p2pt#modernise",
"perfect-seekbar": "^1.1.0",
"quartermoon": "^1.2.3",
"simple-store-svelte": "^1.0.1",
"svelte": "^4.2.2",
"svelte-eslint-parser": "^0.33.1",
"svelte-keybinds": "1.0.5",
"svelte-loader": "^3.1.9",
"svelte-miniplayer": "1.0.3",
"svelte-sonner": "^0.3.0",
"webpack": "^5.89.0",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "^4.15.1",
"webtorrent": "^2.1.28"
},
"dependencies": {
"utp-native": "^2.5.3"
},
"standard": {
"ignore": [
"bundle.js",
@ -67,77 +15,21 @@
"node"
]
},
"build": {
"directories": {
"buildResources": "buildResources"
},
"asarUnpack": "**/*.node",
"electronDownload": {
"mirror": "https://github.com/aa910d571134/feb7c2e1a10f/releases/download/",
"version": "25.1.0",
"customDir": "2ffc48f0b43f"
},
"protocols": {
"name": "miru",
"schemes": [
"miru"
]
},
"publish": [
{
"provider": "github",
"owner": "ThaUnknown",
"repo": "miru"
}
],
"afterSign": "./buildResources/notarize.js",
"appId": "com.github.thaunknown.miru",
"productName": "Miru",
"files": [
"build/**/*",
"!node_modules/**/*.{mk,a,o,h}"
],
"mac": {
"artifactName": "${os}-${name}-${version}.${ext}",
"singleArchFiles": "node_modules/+(register-scheme|utp-native|fs-native-extensions)/**",
"category": "public.app-category.video",
"icon": "buildResources/icon.icns",
"target": [
{
"arch": "universal",
"target": "dmg"
}
]
},
"win": {
"artifactName": "${os}-${name}-${version}.${ext}",
"target": "nsis"
},
"linux": {
"artifactName": "${os}-${name}-${version}.${ext}",
"category": "AudioVideo;Video",
"description": "Bittorrent streaming software for cats",
"desktop": {
"Name": "Miru",
"Comment": "Bittorrent streaming software for cats",
"Keywords": "anime",
"Type": "Application",
"MimeType": "x-scheme-handler/miru;"
},
"target": [
{
"arch": "x64",
"target": "AppImage"
},
{
"arch": "x64",
"target": "deb"
}
]
},
"nsis": {
"allowToChangeInstallationDirectory": true,
"oneClick": false
}
"dependencies": {
"@typescript-eslint/parser": "^6.10.0",
"concurrently": "^8.2.2",
"copy-webpack-plugin": "^11.0.0",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
"eslint": "^8.53.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-svelte": "^2.35.0",
"html-webpack-plugin": "^5.5.3",
"matroska-metadata": "^1.0.3",
"mini-css-extract-plugin": "^2.7.6",
"webpack": "^5.89.0",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "^4.15.1",
"webtorrent": "^2.1.29"
}
}

File diff suppressed because it is too large Load diff

0
pnpm-workspace.yaml Normal file
View file

View file

@ -1,14 +1,9 @@
{
"compilerOptions": {
"baseUrl": "./",
"paths": {
"@/*": ["src/renderer/*"],
},
"checkJs": true,
"target": "ESNext",
"moduleResolution": "node",
"module": "ESNext",
"types": ["./types.d.ts"],
"allowSyntheticDefaultImports": true
},
"exclude": ["node_modules/**", "**/node_modules", "dist", "build"]

View file

@ -1,159 +0,0 @@
const { join, resolve } = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const MiniCssExtractPlugin = require('mini-css-extract-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
const mode = process.env.NODE_ENV?.trim() || 'development'
const isDev = mode === 'development'
module.exports = [
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'background', 'background.js'),
output: {
path: join(__dirname, 'build'),
filename: 'background.js'
},
mode,
externals: {
'utp-native': 'require("utp-native")'
},
resolve: {
aliasFields: [],
mainFields: ['module', 'main', 'node'],
alias: {
'node-fetch': false,
ws: false,
wrtc: false,
'bittorrent-tracker/lib/client/http-tracker.js': resolve('node_modules/bittorrent-tracker/lib/client/http-tracker.js')
}
},
plugins: [new HtmlWebpackPlugin({ filename: 'background.html' })],
target: 'electron20.0-renderer',
devServer: {
devMiddleware: {
writeToDisk: true
},
hot: true,
client: {
overlay: { errors: true, warnings: false, runtimeErrors: false }
},
port: 5000
}
},
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'renderer', 'main.js'),
output: {
path: join(__dirname, 'build'),
filename: 'renderer.js'
},
mode,
module: {
rules: [
{
test: /\.svelte$/,
use: {
loader: 'svelte-loader',
options: {
compilerOptions: {
dev: isDev
},
emitCss: !isDev,
hotReload: isDev
}
}
},
{
test: /\.css$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: 'css-loader',
options: {
sourceMap: true
}
}
]
},
{
// required to prevent errors from Svelte on Webpack 5+
test: /node_modules\/svelte\/.*\.mjs$/,
resolve: {
fullySpecified: false
}
}
]
},
resolve: {
aliasFields: ['browser'],
alias: {
'@': resolve('src/renderer'),
module: false,
url: false,
'bittorrent-tracker/lib/client/websocket-tracker.js': resolve('node_modules/bittorrent-tracker/lib/client/websocket-tracker.js')
},
extensions: ['.mjs', '.js', '.svelte']
},
plugins: [
new MiniCssExtractPlugin({
filename: '[name].css'
}),
new CopyWebpackPlugin({
patterns: [
{ from: 'src/renderer/public' }
]
}),
new HtmlWebpackPlugin({
filename: 'app.html',
inject: false,
templateContent: ({ htmlWebpackPlugin }) => /* html */`
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset='utf-8'>
<meta name='viewport' content='width=device-width,initial-scale=1'>
<meta name="theme-color" content="#191c20">
<title>Miru</title>
<link rel="preconnect" href="https://www.youtube-nocookie.com">
<link rel="preconnect" href="https://graphql.anilist.co">
<link rel='icon' href='/logo.ico'>
${htmlWebpackPlugin.tags.headTags}
</head>
<body class="dark-mode with-custom-webkit-scrollbars with-custom-css-scrollbars">
${htmlWebpackPlugin.tags.bodyTags}
</body>
</html> `
})],
target: 'web'
},
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'preload', 'preload.js'),
output: {
path: join(__dirname, 'build'),
filename: 'preload.js'
},
resolve: {
aliasFields: []
},
mode,
target: 'electron20.0-preload'
},
{
devtool: 'source-map',
entry: join(__dirname, 'src', 'main', 'main.js'),
output: {
path: join(__dirname, 'build'),
filename: 'main.js'
},
resolve: {
aliasFields: []
},
mode,
target: 'electron20.0-main'
}
]