feat: add seadex mappings

fix: increase amount of tosho results
feat: new styling on rss view
ThaUnknown 2024-01-02 03:04:59 +01:00
parent 686d1aceda
commit 71cd8e9157
10 changed files with 132 additions and 86 deletions

.gitignore vendored
View file

@@ -6,10 +6,11 @@ dist/
build/
# Dependencies
node_modules/
node_modules
**/*.der
**/*.pem
**/*.keystore
**/*.jks
**/node_modules
package-lock.json

View file

@@ -17,5 +17,5 @@
"types": ["./types.d.ts"],
"allowSyntheticDefaultImports": true
},
"exclude": ["node_modules", "dist", "build", "git_modules", "**/node_modules", "**/dist", "**/build", "**/git_modules", "**/node_modules/**", "**/dist/**", "**/build/**", "**/git_modules/**"]
"exclude": ["node_modules", "dist", "build", "git_modules", ".svelte-kit", "**/node_modules", "**/dist", "**/build", "**/git_modules", "**/.svelte-kit", "**/node_modules/*", "**/dist/*", "**/build/*", "**/git_modules/*", "**/.svelte-kit/*"]
}

View file

@@ -1,4 +1,4 @@
import { DOMPARSER, PromiseBatch, binarySearch } from './util.js'
import { DOMPARSER, PromiseBatch } from './util.js'
import { alRequest, alSearch } from './anilist.js'
import _anitomyscript from 'anitomyscript'
import { toast } from 'svelte-sonner'
@@ -439,23 +439,3 @@ export async function getEpisodeMetadataForMedia (media) {
episodeMetadataMap[media.id] = episodes
return episodes
}
let seadex = []
requestIdleCallback(async () => {
const res = await fetch('https://sneedex.moe/api/public/nyaa')
const json = await res.json()
seadex = json.flatMap(({ nyaaIDs }) => nyaaIDs).sort((a, b) => a - b) // sort for binary search
})
export function mapBestRelease (entries) {
return entries.map(entry => {
if (entry.id) {
if (entry.id === '?') return entry
if (binarySearch(seadex, entry.id)) entry.best = true
return entry
}
const match = entry.link.match(/\d+/i)
if (match && binarySearch(seadex, Number(match[0]))) entry.best = true
return entry
})
}

View file

@@ -0,0 +1,24 @@
import { fastPrettyBytes } from '../util.js'
export default async function (media) {
const res = await fetch(`https://beta.releases.moe/api/collections/entries/records?page=1&perPage=1&filter=alID%3D%22${media.id}%22&skipTotal=1&expand=trs`)
const { items } = await res.json()
if (!items[0]?.expand?.trs?.length) return []
const { trs } = items[0]?.expand
return trs.filter(({ infoHash }) => infoHash !== '<redacted>').map(torrent => {
return {
hash: torrent.infoHash,
link: torrent.infoHash,
title: `[${torrent.releaseGroup}] ${media.title.userPreferred}`,
size: fastPrettyBytes(torrent.files.reduce((prev, curr) => prev + curr.length, 0)),
type: torrent.isBest ? 'best' : 'alt',
date: new Date(torrent.created),
parseObject: {
audio_term: [torrent.dualAudio && 'DUALAUDIO']
}
}
})
}
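
For orientation, here is a minimal usage sketch of the new SeaDex fetcher above, using the same default-import name the tosho module further down gives it (getSeedexBests from './seadex.js'). The media values are made up for the example; the function only reads media.id and media.title.userPreferred.

// sketch, not part of the commit; media values are assumptions
import getSeedexBests from './seadex.js'

const media = { id: 1, title: { userPreferred: 'Cowboy Bebop' } }
const entries = await getSeedexBests(media)
// each returned entry roughly looks like:
// { hash, link, title, size, type: 'best' | 'alt', date, parseObject }
for (const { type, title, size } of entries) console.log(type, title, size)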

View file

@@ -0,0 +1,21 @@
import { binarySearch } from '../util.js'
let seadex = []
requestIdleCallback(async () => {
const res = await fetch('https://sneedex.moe/api/public/nyaa')
const json = await res.json()
seadex = json.flatMap(({ nyaaIDs }) => nyaaIDs).sort((a, b) => a - b) // sort for binary search
})
export default function (entries) {
return entries.map(entry => {
if (entry.id) {
if (entry.id === '?') return entry
if (binarySearch(seadex, entry.id)) entry.type = 'alt'
return entry
}
const match = entry.link.match(/\d+/i)
if (match && binarySearch(seadex, Number(match[0]))) entry.type = 'alt'
return entry
})
}
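
The module above relies on a binarySearch helper imported from util.js, which this diff does not show. As a reference point, a minimal sketch of what such a helper could look like, assuming it reports whether a value is present in an ascending-sorted numeric array (which is why the nyaa IDs are sorted before use); the real util.js implementation may differ.

// sketch, not part of the commit
export function binarySearch (arr, target) {
  let low = 0
  let high = arr.length - 1
  while (low <= high) {
    const mid = (low + high) >>> 1 // unsigned shift keeps the midpoint an integer
    if (arr[mid] === target) return true
    if (arr[mid] < target) low = mid + 1
    else high = mid - 1
  }
  return false
}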

View file

@@ -1,31 +1,33 @@
import { mapBestRelease, anitomyscript } from '../anime.js'
import { anitomyscript } from '../anime.js'
import { fastPrettyBytes, sleep } from '../util.js'
import { exclusions } from '../rss.js'
import { settings } from '@/modules/settings.js'
import { alRequest } from '../anilist.js'
import { client } from '@/modules/torrent.js'
import mapBestSneedexReleases from './sneedex.js'
import getSeedexBests from './seadex.js'
export default async function ({ media, episode }) {
const json = await getAniDBFromAL(media)
if (typeof json !== 'object') throw new Error(json || 'No mapping found.')
const aniDBEpisode = await getAniDBEpisodeFromAL({ media, episode }, json)
const movie = isMovie(media) // don't query movies with qualities, to allow 4k
let entries = await getToshoEntries(media, aniDBEpisode, json, !movie && settings.value.rssQuality)
const aniDBEpisode = await getAniDBEpisodeFromAL({ media, episode }, json)
let entries = await getToshoEntriesForMedia(media, aniDBEpisode, json, !movie && settings.value.rssQuality)
if (!entries.length && !movie) entries = await getToshoEntriesForMedia(media, aniDBEpisode, json)
if (!entries?.length) throw new Error('No entries found.')
if (!entries.length && !movie) entries = await getToshoEntries(media, aniDBEpisode, json)
const deduped = dedupeEntries(entries)
const parseObjects = await anitomyscript(deduped.map(({ title }) => title))
for (const i in parseObjects) deduped[i].parseObject = parseObjects[i]
const mapped = mapBestRelease(mapTosho2dDeDupedEntry(entries))
const withBests = dedupeEntries([...await getSeedexBests(media), ...mapBestSneedexReleases(deduped)])
if (!mapped?.length) throw new Error('No entries found.')
const parseObjects = await anitomyscript(mapped.map(({ title }) => title))
for (const i in parseObjects) mapped[i].parseObject = parseObjects[i]
return updatePeerCounts(withBests)
}
async function updatePeerCounts (entries) {
const id = crypto.randomUUID()
const updated = await Promise.race([
@@ -37,19 +39,18 @@ export default async function ({ media, episode }) {
console.log(detail)
}
client.on('scrape', check)
client.send('scrape', { id, infoHashes: mapped.map(({ hash }) => hash) })
client.send('scrape', { id, infoHashes: entries.map(({ hash }) => hash) })
}),
sleep(5000)
])
for (const { hash, complete, downloaded, incomplete } of updated || []) {
const found = mapped.find(mapped => mapped.hash === hash)
const found = entries.find(mapped => mapped.hash === hash)
found.downloads = downloaded
found.leechers = incomplete
found.seeders = complete
}
return mapped
return entries
}
async function getAniDBFromAL (media) {
@@ -61,7 +62,6 @@ async function getAniDBFromAL (media) {
console.log('failed getting AniDB ID, checking via parent')
const parentID = getParentForSpecial(media)
if (!parentID) return
console.log('found via parent')
@@ -120,7 +120,7 @@ export function getEpisodeNumberByAirDate (alDate, episodes, episode) {
})
}
async function getToshoEntries (media, episode, { mappings }, quality) {
async function getToshoEntriesForMedia (media, episode, { mappings }, quality) {
const promises = []
if (episode) {
@@ -128,7 +128,7 @@ async function getToshoEntries (media, episode, { mappings }, quality) {
console.log('fetching episode', anidbEid, quality)
promises.push(fetchSingleEpisode({ id: anidbEid, quality }))
promises.push(fetchSingleEpisodeForAnidb({ id: anidbEid, quality }))
} else {
// TODO: look for episodes via.... title?
}
@@ -136,7 +136,7 @@ async function getToshoEntries (media, episode, { mappings }, quality) {
// look for batches and movies
const movie = isMovie(media)
if (mappings.anidb_id && media.status === 'FINISHED' && (movie || media.episodes !== 1)) {
promises.push(fetchBatches({ episodeCount: media.episodes, id: mappings.anidb_id, quality, movie }))
promises.push(fetchBatchesForAnidb({ episodeCount: media.episodes, id: mappings.anidb_id, quality, movie }))
console.log('fetching batch', quality, movie)
if (!movie) {
const courRelation = getSplitCourRelation(media)
@@ -147,7 +147,7 @@ async function getToshoEntries (media, episode, { mappings }, quality) {
const mappingsResponse = await fetch('https://api.ani.zip/mappings?anilist_id=' + courRelation.id)
const json = await mappingsResponse.json()
console.log('found mappings for split cour', !!json.mappings.anidb_id)
if (json.mappings.anidb_id) promises.push(fetchBatches({ episodeCount, id: json.mappings.anidb_id, quality }))
if (json.mappings.anidb_id) promises.push(fetchBatchesForAnidb({ episodeCount, id: json.mappings.anidb_id, quality }))
} catch (e) {
console.error('failed getting split-cour data', e)
}
@@ -155,7 +155,7 @@ async function getToshoEntries (media, episode, { mappings }, quality) {
}
}
return (await Promise.all(promises)).flat()
return mapToshoEntries((await Promise.all(promises)).flat())
}
function getSplitCourRelation (media) {
@@ -233,28 +233,30 @@ function isMovie (media) {
return media.duration > 80 && media.episodes === 1
}
function buildQuery (quality) {
const QUALITIES = ['1080', '720', '540', '480']
const ANY = 'e*|a*|r*|i*|o*'
function buildToshoQuery (quality) {
let query = `&qx=1&q=!("${exclusions.join('"|"')}")`
if (quality) {
query += ` "${quality}"`
query += `((${ANY}|"${quality}") !"${QUALITIES.filter(q => q !== quality).join('" !"')}")`
} else {
query += 'e*|a*|r*|i*|o*' // HACK: tosho NEEDS a search string, so we lazy search a single common vowel
query += ANY // HACK: tosho NEEDS a search string, so we lazy search a single common vowel
}
return query
}
async function fetchBatches ({ episodeCount, id, quality, movie = null }) {
async function fetchBatchesForAnidb ({ episodeCount, id, quality, movie = null }) {
try {
const queryString = buildQuery(quality)
const queryString = buildToshoQuery(quality)
const torrents = await fetch(settings.value.toshoURL + 'json?order=size-d&aid=' + id + queryString)
// safe if AL includes EP 0 or doesn't
// safe whether AL includes EP 0 or not
const batches = (await torrents.json()).filter(entry => entry.num_files >= episodeCount)
if (!movie) {
for (const batch of batches) {
batch.batch = true
}
for (const batch of batches) batch.type = 'batch'
}
console.log({ batches })
return batches
@@ -264,9 +266,9 @@ async function fetchBatches ({ episodeCount, id, quality, movie = null }) {
}
}
async function fetchSingleEpisode ({ id, quality }) {
async function fetchSingleEpisodeForAnidb ({ id, quality }) {
try {
const queryString = buildQuery(quality)
const queryString = buildToshoQuery(quality)
const torrents = await fetch(settings.value.toshoURL + 'json?eid=' + id + queryString)
const episodes = await torrents.json()
@@ -278,33 +280,41 @@ async function fetchSingleEpisode ({ id, quality }) {
}
}
function mapTosho2dDeDupedEntry (entries) {
function mapToshoEntries (entries) {
return entries.map(entry => {
return {
title: entry.title || entry.torrent_name,
link: entry.magnet_uri,
id: entry.nyaa_id, // TODO: used for sneedex mappings, remove later
seeders: entry.seeders >= 30000 ? 0 : entry.seeders,
leechers: entry.leechers >= 30000 ? 0 : entry.leechers,
downloads: entry.torrent_downloaded_count,
hash: entry.info_hash,
size: entry.total_size && fastPrettyBytes(entry.total_size),
verified: !!entry.anidb_fid,
type: entry.type,
date: entry.timestamp && new Date(entry.timestamp * 1000)
}
})
}
function dedupeEntries (entries) {
const deduped = {}
for (const entry of entries) {
if (deduped[entry.info_hash]) {
const dupe = deduped[entry.info_hash]
dupe.title ??= entry.title || entry.torrent_name
dupe.id ||= entry.nyaa_id
if (deduped[entry.hash]) {
const dupe = deduped[entry.hash]
dupe.title ??= entry.title
dupe.link ??= entry.link
dupe.id ||= entry.id
dupe.seeders ||= entry.seeders >= 30000 ? 0 : entry.seeders
dupe.leechers ||= entry.leechers >= 30000 ? 0 : entry.leechers
dupe.downloads ||= entry.torrent_downloaded_count
dupe.size ||= entry.total_size && fastPrettyBytes(entry.total_size)
dupe.verified ||= !!entry.anidb_fid
dupe.date ||= entry.timestamp && new Date(entry.timestamp * 1000)
dupe.downloads ||= entry.downloads
dupe.size ||= entry.size
dupe.verified ||= entry.verified
dupe.date ||= entry.date
dupe.type ??= entry.type
} else {
deduped[entry.info_hash] = {
title: entry.title || entry.torrent_name,
link: entry.magnet_uri,
id: entry.nyaa_id,
seeders: entry.seeders >= 30000 ? 0 : entry.seeders,
leechers: entry.leechers >= 30000 ? 0 : entry.leechers,
downloads: entry.torrent_downloaded_count,
hash: entry.info_hash,
size: entry.total_size && fastPrettyBytes(entry.total_size),
verified: !!entry.anidb_fid,
batch: entry.batch,
date: entry.timestamp && new Date(entry.timestamp * 1000)
}
deduped[entry.hash] = entry
}
}
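
To make the widened search concrete (the "increase amount of tosho results" part of this commit), here is an illustrative, self-contained copy of the new query builder with an assumed exclusions list; in the real module the list comes from rss.js. The old builder required the chosen quality to appear in the title, while the new one also accepts untagged titles as long as they do not mention a different quality, which is roughly why more results come back.

// sketch, not part of the commit; the exclusions values are assumptions
const QUALITIES = ['1080', '720', '540', '480']
const ANY = 'e*|a*|r*|i*|o*'
const exclusions = ['foo', 'bar']

function buildToshoQuery (quality) {
  let query = `&qx=1&q=!("${exclusions.join('"|"')}")`
  if (quality) {
    query += `((${ANY}|"${quality}") !"${QUALITIES.filter(q => q !== quality).join('" !"')}")`
  } else {
    query += ANY // tosho needs a non-empty search string
  }
  return query
}

console.log(buildToshoQuery('1080'))
// &qx=1&q=!("foo"|"bar")((e*|a*|r*|i*|o*|"1080") !"720" !"540" !"480")
console.log(buildToshoQuery(''))
// &qx=1&q=!("foo"|"bar")e*|a*|r*|i*|o*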

View file

@@ -148,17 +148,21 @@
</thead>
<tbody class='pointer'>
{#each filtered as row}
<tr class='border-0' class:text-secondary={row.best} use:click={() => play(row)}>
<tr class='border-0' class:text-secondary={row.type === 'best'} class:text-danger={row.type === 'alt'} class:text-success={!row.type && row.verified} use:click={() => play(row)}>
<td class='py-10 pl-20 pr-0'>
{#if row.best}
{#if row.type === 'best'}
<div class='material-symbols-outlined font-size-24 symbol-bold' title='Best Release'>
star
</div>
{:else if row.type === 'alt'}
<div class='material-symbols-outlined font-size-24 symbol-bold' title='Alt Release'>
star
</div>
{:else if row.verified}
<div class='text-success material-symbols-outlined font-size-24 symbol-bold' title='Verified'>
<div class='material-symbols-outlined font-size-24 symbol-bold' title='Verified'>
verified
</div>
{:else if row.batch}
{:else if row.type === 'batch'}
<div class='text-light material-symbols-outlined font-size-24 symbol-bold' title='Batch'>
database
</div>
@@ -177,7 +181,7 @@
<td class='py-10 px-20'>{row.seeders ?? '?'}</td>
<td class='py-10 px-20'>{row.leechers ?? '?'}</td>
<td class='py-10 px-20'>{row.downloads ?? '?'}</td>
<td class='py-10 px-20 text-nowrap'>{since(row.date)}</td>
<td class='py-10 px-20 text-nowrap'>{row.date ? since(row.date) : '?'}</td>
</tr>
{/each}
</tbody>

View file

@@ -16,7 +16,8 @@
<select class='form-control bg-dark w-300 mw-full' bind:value={settings.rssQuality}>
<option value='1080' selected>1080p</option>
<option value='720'>720p</option>
<option value='480||540'>SD</option>
<option value='540'>540p</option>
<option value='480'>480p</option>
<option value="">None</option>
</select>
</SettingCard>

View file

@@ -14,7 +14,7 @@
"paths": {
"@/*": ["./common/*"],
},
"types": ["@cloudflare/workers-types"],
"types": ["@cloudflare/workers-types", "./types.d.ts"],
},
"exclude": ["node_modules", "dist", "build", "git_modules", "**/node_modules", "**/dist", "**/build", "**/git_modules", "**/node_modules/**", "**/dist/**", "**/build/**", "**/git_modules/**"]
"exclude": ["node_modules", "dist", "build", "git_modules", ".svelte-kit", "**/node_modules", "**/dist", "**/build", "**/git_modules", "**/.svelte-kit", "**/node_modules/*", "**/dist/*", "**/build/*", "**/git_modules/*", "**/.svelte-kit/*"]
}

types.d.ts vendored Normal file
View file

@@ -0,0 +1,5 @@
import type { SvelteComponentTyped } from 'svelte'
declare module '*.svelte' {
export default SvelteComponentTyped
}
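
Lastly, a small illustration of what the new ambient declaration enables once types.d.ts is referenced from the tsconfig "types" arrays shown earlier: TypeScript-aware tooling can resolve imports of .svelte files without per-file shims. The component path below is hypothetical.

// sketch, not part of the commit; the component path is made up
import RssModal from './views/RssModal.svelte'
console.log(typeof RssModal) // typed as SvelteComponentTyped via the '*.svelte' declaration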