feat: use compound queries for name resolving

fix: errors on episode cards with no media
feat: use AL media cache in resolver
This commit is contained in:
ThaUnknown 2024-03-24 17:14:30 +01:00
parent 7f8dd7841e
commit c0d1f97c89
11 changed files with 142 additions and 132 deletions

View file

@ -21,7 +21,7 @@
preview = state
}
const progress = liveAnimeEpisodeProgress(media.id, data.episode)
const progress = liveAnimeEpisodeProgress(media?.id, data?.episode)
</script>
<div class='d-flex p-20 pb-10 position-relative' use:hoverClick={[data.onclick || viewMedia, setHoverState]}>

View file

@ -9,7 +9,7 @@
const episodeThumbnail = ((!media?.mediaListEntry?.status || !(media.mediaListEntry.status === 'CURRENT' && media.mediaListEntry.progress < data.episode)) && data.episodeData?.image) || media?.bannerImage || media?.coverImage.extraLarge || ' '
let hide = true
const progress = liveAnimeEpisodeProgress(media.id, data.episode)
const progress = liveAnimeEpisodeProgress(media?.id, data?.episode)
</script>
<div class='position-absolute w-400 mh-400 absolute-container top-0 m-auto bg-dark-light z-30 rounded overflow-hidden pointer d-flex flex-column'>

View file

@ -246,7 +246,7 @@ class AnilistClient {
/**
* @param {string} query
* @param {object} variables
* @param {Record<string, any>} variables
*/
alRequest (query, variables) {
const options = {
@ -256,7 +256,7 @@ class AnilistClient {
Accept: 'application/json'
},
body: JSON.stringify({
query: query.replace(/\s/g, ''),
query: query.replace(/\s/g, '').replaceAll('&nbsp;', ' '),
variables: {
sort: 'TRENDING_DESC',
page: 1,
@ -278,6 +278,59 @@ class AnilistClient {
return media.reduce((prev, curr) => prev.lavenshtein <= curr.lavenshtein ? prev : curr)
}
/**
 * Resolves many anime titles in a single compound GraphQL request.
 * Builds one aliased `Page` sub-query per title variant, picks the closest
 * match per variant, then fetches the full media objects for the winners.
 * @param {{key: string, title: string, year?: string, isAdult: boolean}[]} flattenedTitles - title variants; `key` groups variants belonging to the same file name
 * @returns {Promise<[string, import('./al.d.ts').Media][]>} pairs of [key, best-matching media]
 */
async alSearchCompound (flattenedTitles) {
// one GraphQL variable per title variant: v0, v1, ...
/** @type {Record<`v${number}`, string>} */
const requestVariables = flattenedTitles.reduce((obj, { title }, i) => {
obj[`v${i}`] = title
return obj
}, {})
const queryVariables = flattenedTitles.map((_, i) => `$v${i}: String`).join(', ')
// one aliased Page sub-query per variant, so every search runs in a single request
const fragmentQueries = flattenedTitles.map(({ year, isAdult }, i) => /* js */`
v${i}: Page(perPage: 10) {
media(type: ANIME, search: $v${i}, status_in: [RELEASING, FINISHED], isAdult: ${!!isAdult} ${year ? `, seasonYear: ${year}` : ''}) {
...med
}
}`)
// NOTE: alRequest strips ALL whitespace from the query and then converts
// &nbsp; back to spaces — the entities below are required to keep
// "fragment med on Media" valid after that stripping
const query = /* js */`
query(${queryVariables}){
${fragmentQueries}
}
fragment&nbsp;med&nbsp;on&nbsp;Media {
id,
title {
romaji,
english,
native
},
synonyms
}`
/** @type {import('./al.d.ts').Query<Record<string, {media: import('./al.d.ts').Media[]}>>} */
const res = await this.alRequest(query, requestVariables)
// pick the media with the smallest title distance per key;
// the first variant (per key) that yields any results wins
/** @type {Record<string, number>} */
const searchResults = {}
for (const [variableName, { media }] of Object.entries(res.data)) {
if (!media.length) continue
const titleObject = flattenedTitles[Number(variableName.slice(1))]
if (searchResults[titleObject.key]) continue
searchResults[titleObject.key] = media.map(media => getDistanceFromTitle(media, titleObject.title)).reduce((prev, curr) => prev.lavenshtein <= curr.lavenshtein ? prev : curr).id
}
// fetch the full media objects for every selected id in one request
const ids = Object.values(searchResults)
const search = await this.searchIDS({ id: ids, perPage: 50 })
return Object.entries(searchResults).map(([filename, id]) => [filename, search.data.Page.media.find(media => media.id === id)])
}
async alEntry (filemedia) {
// check if values exist
if (filemedia.media && alToken) {

View file

@ -1,7 +1,6 @@
import { toast } from 'svelte-sonner'
import { anilistClient } from './anilist.js'
import { anitomyscript } from './anime.js'
import { PromiseBatch } from './util.js'
const postfix = {
1: 'st', 2: 'nd', 3: 'rd'
@ -22,83 +21,79 @@ export default new class AnimeResolver {
}
/**
* resolve anime name based on file name and store it
* @param {import('anitomyscript').AnitomyResult} parseObject
* @param {string} title
* @returns {string[]}
*/
async findAnimeByTitle (parseObject) {
const name = parseObject.anime_title
const variables = { name, perPage: 10, status: ['RELEASING', 'FINISHED'], sort: 'SEARCH_MATCH' }
if (parseObject.anime_year) variables.year = parseObject.anime_year
alternativeTitles (title) {
const titles = []
// inefficient but readable
let media = null
try {
// change S2 into Season 2 or 2nd Season
const match = variables.name.match(/ S(\d+)/)
const oldname = variables.name
if (match) {
if (Number(match[1]) === 1) { // if this is S1, remove the " S1" or " S01"
variables.name = variables.name.replace(/ S(\d+)/, '')
media = await anilistClient.alSearch(variables)
} else {
variables.name = variables.name.replace(/ S(\d+)/, ` ${Number(match[1])}${postfix[Number(match[1])] || 'th'} Season`)
media = await anilistClient.alSearch(variables)
if (!media) {
variables.name = oldname.replace(/ S(\d+)/, ` Season ${Number(match[1])}`)
media = await anilistClient.alSearch(variables)
}
}
let modified = title
// preemptively change S2 into Season 2 or 2nd Season, otherwise this will have accuracy issues
const seasonMatch = title.match(/ S(\d+)/)
if (seasonMatch) {
if (Number(seasonMatch[1]) === 1) { // if this is S1, remove the " S1" or " S01"
modified = title.replace(/ S(\d+)/, '')
titles.push(modified)
} else {
media = await anilistClient.alSearch(variables)
modified = title.replace(/ S(\d+)/, ` ${Number(seasonMatch[1])}${postfix[Number(seasonMatch[1])] || 'th'} Season`)
titles.push(modified)
titles.push(title.replace(/ S(\d+)/, ` Season ${Number(seasonMatch[1])}`))
}
} else {
titles.push(title)
}
// remove - :
if (!media) {
const match = variables.name.match(/[-:]/g)
if (match) {
variables.name = variables.name.replace(/[-:]/g, '')
media = await anilistClient.alSearch(variables)
}
}
// remove (TV)
if (!media) {
const match = variables.name.match(/\(TV\)/)
if (match) {
variables.name = variables.name.replace('(TV)', '')
media = await anilistClient.alSearch(variables)
}
}
// check adult
if (!media) {
variables.isAdult = true
media = await anilistClient.alSearch(variables)
}
} catch (e) { }
// remove - :
const specialMatch = modified.match(/[-:]/g)
if (specialMatch) {
modified = modified.replace(/[-:]/g, '')
titles.push(modified)
}
if (media) this.animeNameCache[this.getCacheKeyForTitle(parseObject)] = media
// remove (TV)
const tvMatch = modified.match(/\(TV\)/)
if (tvMatch) {
modified = modified.replace('(TV)', '')
titles.push(modified)
}
return titles
}
// id keyed cache for anilist media
animeCache = {}
/**
* resolve anime name based on file name and store it
* @param {import('anitomyscript').AnitomyResult[]} parseObjects
*/
async findAnimesByTitle (parseObjects) {
const titleObjects = parseObjects.map(obj => {
const key = this.getCacheKeyForTitle(obj)
const titleObjects = this.alternativeTitles(obj.anime_title).map(title => ({ title, year: obj.anime_year, key, isAdult: false }))
titleObjects.push({ ...titleObjects.at(-1), isAdult: true })
return titleObjects
}).flat()
for (const [key, media] of await anilistClient.alSearchCompound(titleObjects)) {
this.animeNameCache[key] = media
}
}
// TODO: this should use the global anime cache once that is created
/**
* @param {number} id
* @returns {any}
*/
getAnimeById (id) {
if (!this.animeCache[id]) this.animeCache[id] = anilistClient.searchIDSingle({ id })
async getAnimeById (id) {
if (anilistClient.mediaCache[id]) return anilistClient.mediaCache[id]
const res = await anilistClient.searchIDSingle({ id })
return this.animeCache[id]
return res.data.Media
}
// TODO: anidb aka true episodes need to be mapped to anilist episodes a bit better
// TODO: anidb aka true episodes need to be mapped to anilist episodes a bit better, shit like mushoku offsets caused by episode 0's in between seasons
/**
* @param {string | string[]} fileName
* @returns {Promise<any[]>}
*/
async resolveFileAnime (fileName) {
if (!fileName) return [{}]
const parseObjs = await anitomyscript(fileName)
// batches promises in 10 at a time, because of CF burst protection, which still sometimes gets triggered :/
@ -108,7 +103,7 @@ export default new class AnimeResolver {
if (key in this.animeNameCache) continue
uniq[key] = obj
}
await PromiseBatch(this.findAnimeByTitle.bind(this), Object.values(uniq), 10)
await this.findAnimesByTitle(Object.values(uniq))
const fileAnimes = []
for (const parseObj of parseObjs) {
@ -131,7 +126,7 @@ export default new class AnimeResolver {
// parent check is to break out of those incorrectly resolved OVA's
// if we used anime season to resolve anime name, then there's no need to march into prequel!
const prequel = !parseObj.anime_season && (this.findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && this.findEdge(media, 'PARENT')?.node))
const root = prequel && (await this.resolveSeason({ media: (await this.getAnimeById(prequel.id)).data.Media, force: true })).media
const root = prequel && (await this.resolveSeason({ media: await this.getAnimeById(prequel.id), force: true })).media
// if highest value is bigger than episode count or latest streamed episode +1 for safety, parseint to math.floor a number like 12.5 - specials - in 1 go
const result = await this.resolveSeason({ media: root || media, episode: parseObj.episode_number[1], increment: !parseObj.anime_season ? null : true })
@ -148,7 +143,7 @@ export default new class AnimeResolver {
if (maxep && parseInt(parseObj.episode_number) > maxep) {
// see big comment above
const prequel = !parseObj.anime_season && (this.findEdge(media, 'PREQUEL')?.node || ((media.format === 'OVA' || media.format === 'ONA') && this.findEdge(media, 'PARENT')?.node))
const root = prequel && (await this.resolveSeason({ media: (await this.getAnimeById(prequel.id)).data.Media, force: true })).media
const root = prequel && (await this.resolveSeason({ media: await this.getAnimeById(prequel.id), force: true })).media
// value bigger than episode count
const result = await this.resolveSeason({ media: root || media, episode: parseInt(parseObj.episode_number), increment: !parseObj.anime_season ? null : true })
@ -212,7 +207,7 @@ export default new class AnimeResolver {
}
return obj
}
media = (await this.getAnimeById(edge.id)).data.Media
media = await this.getAnimeById(edge.id)
const highest = media.nextAiringEpisode?.episode || media.episodes

View file

@ -38,7 +38,6 @@ class Extensions {
/** @param {string[]} extensions */
export async function loadExtensions (extensions) {
// TODO: handle import errors
const sources = (await Promise.all(extensions.map(async extension => {
try {
if (!extension.startsWith('http')) extension = `https://esm.sh/${extension}`

View file

@ -62,7 +62,6 @@ export async function getRSSContent (url) {
class RSSMediaManager {
constructor () {
this.resultMap = {}
this.lastResult = null
}
getMediaForRSS (page, perPage, url, ignoreErrors) {
@ -101,7 +100,7 @@ class RSSMediaManager {
const targetPage = [...changed.content.querySelectorAll('item')].slice(index, index + perPage)
const items = parseRSSNodes(targetPage)
hasNextPage.value = items.length === perPage
const result = items.map(item => this.resolveAnimeFromRSSItem(item))
const result = this.structureResolveResults(items)
this.resultMap[url] = {
date: changed.pubDate,
result
@ -109,29 +108,26 @@ class RSSMediaManager {
return result
}
resolveAnimeFromRSSItem (item) {
this.lastResult = this.queueResolve(item)
return this.lastResult
}
async queueResolve ({ title, link, date }) {
await this.lastResult
const res = {
...(await AnimeResolver.resolveFileAnime(title))[0],
episodeData: undefined,
date: undefined,
onclick: undefined
}
if (res.media?.id) {
try {
res.episodeData = (await getEpisodeMetadataForMedia(res.media))?.[res.episode]
} catch (e) {
console.warn('failed fetching episode metadata', e)
async structureResolveResults (items) {
const results = await AnimeResolver.resolveFileAnime(items.map(item => item.title))
return results.map(async (result, i) => {
const res = {
...result,
episodeData: undefined,
date: undefined,
onclick: undefined
}
}
res.date = date
res.onclick = () => add(link)
return res
if (res.media?.id) {
try {
res.episodeData = (await getEpisodeMetadataForMedia(res.media))?.[res.episode]
} catch (e) {
console.warn('failed fetching episode metadata', e)
}
}
res.date = items[i].date
res.onclick = () => add(items[i].link)
return res
})
}
}

View file

@ -65,15 +65,15 @@ function createSections () {
...settings.value.rssFeedsNew.map(([title, url]) => {
const section = {
title,
load: (page = 1, perPage = 8) => RSSManager.getMediaForRSS(page, perPage, url),
preview: writable(RSSManager.getMediaForRSS(1, 8, url)),
load: (page = 1, perPage = 12) => RSSManager.getMediaForRSS(page, perPage, url),
preview: writable(RSSManager.getMediaForRSS(1, 12, url)),
variables: { disableSearch: true }
}
// update every 30 seconds
section.interval = setInterval(async () => {
if (await RSSManager.getContentChanged(1, 8, url)) {
section.preview.value = RSSManager.getMediaForRSS(1, 8, url, true)
if (await RSSManager.getContentChanged(1, 12, url)) {
section.preview.value = RSSManager.getMediaForRSS(1, 12, url, true)
}
}, 30000)

View file

@ -77,18 +77,6 @@ export function toTS (sec, full) {
if (seconds < 10) seconds = '0' + seconds
return (hours > 0 || full === 1 || full === 2) ? hours + ':' + minutes + ':' + seconds : minutes + ':' + seconds
}
/**
 * Runs an async task over all items, at most `batchSize` concurrently:
 * each batch is awaited in full before the next one starts.
 * @param {(item: any) => Promise<any>} task
 * @param {any[]} items
 * @param {number} batchSize
 * @returns {Promise<any[]>} results in the same order as `items`
 */
export async function PromiseBatch (task, items, batchSize) {
  const results = []
  for (let offset = 0; offset < items.length; offset += batchSize) {
    const batch = items.slice(offset, offset + batchSize)
    const settled = await Promise.all(batch.map(item => task(item)))
    results.push(...settled)
  }
  return results
}
export function generateRandomHexCode (len) {
let hexCode = ''
@ -126,25 +114,6 @@ export function debounce (fn, time) {
}
}
/**
 * Membership test on a sorted array via binary search.
 * @param {any[]} arr - must be sorted ascending
 * @param {any} el - element to look for
 * @returns {boolean} true if `el` is present in `arr`
 */
export function binarySearch (arr, el) {
  let lo = 0
  let hi = arr.length - 1
  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2)
    const probe = arr[mid]
    if (probe === el) return true
    if (el < probe) {
      hi = mid - 1
    } else {
      lo = mid + 1
    }
  }
  return false
}
export const defaults = {
volume: 1,
playerAutoplay: true,

View file

@ -83,7 +83,7 @@ module.exports = (parentDir, alias = {}, aliasFields = 'browser', filename = 'ap
<title>Miru</title>
<!-- <link rel="preconnect" href="https://www.youtube-nocookie.com"> -->
<link rel="preconnect" href="https://graphql.anilist.co">
<link rel="preconnect" href="https://graphql.anilist.co/">
<link rel='icon' href='/logo_filled.png' type="image/png" >
${htmlWebpackPlugin.tags.headTags}
</head>

View file

@ -1,6 +1,6 @@
{
"name": "Miru",
"version": "5.0.0",
"version": "5.0.1",
"private": true,
"author": "ThaUnknown_ <ThaUnknown@users.noreply.github.com>",
"description": "Stream anime torrents, real-time with no waiting for downloads.",

View file

@ -1,4 +1,4 @@
import { app, ipcMain, shell, dialog } from 'electron'
import { app, ipcMain, shell } from 'electron'
import store from './store.js'
export const development = process.env.NODE_ENV?.trim() === 'development'
@ -14,10 +14,8 @@ const flags = [
['enable-features', 'PlatformEncryptedDolbyVision,EnableDrDc,CanvasOopRasterization,ThrottleDisplayNoneAndVisibilityHiddenCrossOriginIframes,UseSkiaRenderer,WebAssemblyLazyCompilation'],
['force_high_performance_gpu'],
['disable-features', 'Vulkan,CalculateNativeWinOcclusion,WidgetLayering'],
['disable-color-correct-rendering'],
['autoplay-policy', 'no-user-gesture-required'], ['disable-notifications'], ['disable-logging'], ['disable-permissions-api'], ['no-sandbox'], ['no-zygote'],
['bypasscsp-schemes'],
['force-color-profile', 'srgb'] // TODO: should this be "scrgb-linear"?
['bypasscsp-schemes']
]
for (const [flag, value] of flags) {
app.commandLine.appendSwitch(flag, value)