Compare commits

...

126 commits

Author SHA1 Message Date
Stratuma
15067b19a4
Merge pull request #1181 from ektatas/fix-useless-first-comma
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
fix first comma when select all episodes
2026-01-11 01:09:42 +01:00
ektatas
3bb33819a7
fix first comma when select all episodes 2026-01-03 15:28:04 +01:00
stratumadev
51b4c173ab chore(crunchy): update basic auth token and user-agent
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-12-21 20:20:55 +01:00
stratumadev
46f352af8c chore(main): update packages
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-12-18 11:01:35 +01:00
stratumadev
c9aa27df23 chore(main): update packages
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-12-07 12:00:26 +01:00
stratumadev
a42dafe608 chore(gui): update express
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-12-02 01:49:47 +01:00
stratumadev
61cf5a59a6 chore(main): update express and yaml 2025-12-02 01:45:27 +01:00
stratumadev
e8bec44982 perf(merger): remove unused functions and imports
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-11-30 03:16:06 +01:00
stratumadev
1ff93f2fbd refactor(merger): move from ffprobe to mediainfo
Removes the necessity of ffprobe completely.
Also removed the mention of ffprobe requirement from the readme.
2025-11-30 03:10:28 +01:00
stratumadev
e4afedfc9c chore(args): remove unwanted console.log 2025-11-30 01:52:40 +01:00
stratumadev
aa1180df48 fix(args): transformer also applies to default values 2025-11-30 01:51:00 +01:00
stratumadev
429bb2d690 fix(args): resolve issue with short flag parsing in overrideArguments function
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-28 22:22:00 +01:00
stratumadev
c9ca51e6ef chore(main): remove unused/outdated files and move to typescript only
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-28 13:49:27 +01:00
stratumadev
7d828a3d47 docs(main): bump version 2025-11-28 12:35:41 +00:00
stratumadev
0bb757f655 chore(main): bump version 2025-11-28 13:34:21 +01:00
stratumadev
1bf0af08e6 fix(args): correct parsing so string options don't treat the next option as a value 2025-11-28 13:30:55 +01:00
stratumadev
aed9169a69 Merge branch 'master' of https://github.com/anidl/multi-downloader-nx
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-28 00:49:53 +01:00
stratumadev
422af6a46c chore(workflow): update commit message of auto-documentation 2025-11-28 00:49:47 +01:00
stratumadev
f22f73dbf1 chore(main): bump version + Documentation 2025-11-27 23:46:09 +00:00
stratumadev
749285d7d1 chore(main): bump version 2025-11-28 00:44:46 +01:00
stratumadev
8764e608dc feat(crunchy): add subtitleTimestampFix function (#1121)
Fix Crunchyroll subtitles with invalid durations.
If start > video length, the line is deleted.
If only end > video length, end is trimmed to video duration. + Documentation
2025-11-27 23:41:39 +00:00
stratumadev
5f192a31c0 feat(crunchy): add subtitleTimestampFix function (#1121)
Fix Crunchyroll subtitles with invalid durations.
If start > video length, the line is deleted.
If only end > video length, end is trimmed to video duration.
2025-11-28 00:40:11 +01:00
stratumadev
045a439b82 fix(m3u8): invalid m3u8-parser import 2025-11-27 22:32:52 +01:00
stratumadev
cd530295a8 chore: remove prettier & eslint commands from precommit test 2025-11-27 22:11:00 +01:00
stratumadev
289265652e chore: add husky for cleaner commits 2025-11-27 22:10:00 +01:00
stratumadev
88e06bbf6e chore: update dependencies 2025-11-27 21:51:07 +01:00
stratumadev
a70521ced7 increased default part download retries 2025-11-27 21:43:12 +01:00
stratumadev
37cac7c789 updated workflows
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-27 15:08:30 +01:00
stratumadev
6f58f3474e Merge pull request #1146 from HyperNylium/master
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
Update get started guide to include new CDM requirements + Documentation
2025-11-27 10:44:55 +00:00
Stratuma
15e301a965
Merge pull request #1146 from HyperNylium/master
Update get started guide to include new CDM requirements
2025-11-27 11:44:40 +01:00
stratumadev
c4e2be1009 bump version 2025-11-27 11:43:24 +01:00
stratumadev
3d5142540b moved back to commander and fixed React error 2025-11-27 11:42:55 +01:00
AnimeDL
c38dd20904 Temporarily revert to yargs + Documentation
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-27 02:57:21 +00:00
AnimeDL
5cacc090ef Temporarily revert to yargs 2025-11-26 18:56:53 -08:00
HyperNylium
4cbcc24a75
Update GET-STARTED.md 2025-11-26 20:41:39 -05:00
HyperNylium
84965dcc85
Update GET-STARTED.md 2025-11-26 20:38:22 -05:00
HyperNylium
1716183678
Update GET-STARTED.md 2025-11-26 19:40:50 -05:00
HyperNylium
71563f5778
Update GET-STARTED.md 2025-11-26 19:09:48 -05:00
stratumadev
d3479daf8d Fixed camel case problem 2025-11-26 21:24:13 +01:00
stratumadev
07cceb7928 moved to better obfuscated crunchyroll UA
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-26 17:52:48 +01:00
stratumadev
8e267c8e0a forgot a couple of new urls 2025-11-26 17:38:25 +01:00
stratumadev
160c8a1cb4 reverted crunchyroll to legacy api 2025-11-26 17:08:47 +01:00
stratumadev
ea53e9071f removed unused imports
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-26 04:30:29 +01:00
stratumadev
9cc0ddc193 removed unnecessary packages for better integrity 2025-11-26 04:17:21 +01:00
stratumadev
96cec167e1 temporarily disabled crunchyroll turnstile handling
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-26 03:12:10 +01:00
stratumadev
36811d9a7c added .wvd cdm and .prd v2/v3 cdm support
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-11-24 12:26:36 +01:00
stratumadev
9f7a2aaf79 fixed tsc error
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-windows-arm64 (push) Has been cancelled
Style and build test / build-test-linux-arm64 (push) Has been cancelled
Style and build test / build-test-macos-arm64 (push) Has been cancelled
Style and build test / build-test-windows-x64 (push) Has been cancelled
Style and build test / build-test-linux-x64 (push) Has been cancelled
Style and build test / build-test-macos-x64 (push) Has been cancelled
2025-11-22 23:20:51 +01:00
stratumadev
13333e2bd1 rolled back eslint version, moved to new widevine module 2025-11-22 23:19:09 +01:00
stratumadev
ee6b92909a updated packages
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-21 22:45:03 +01:00
stratumadev
c66d3e12ad updated packages
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-21 14:47:23 +01:00
stratumadev
f490682de9 updated packages 2025-11-21 11:09:15 +01:00
stratumadev
2fe04cf8a6 Merge branch 'master' of https://github.com/anidl/multi-downloader-nx
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-20 23:48:55 +01:00
Stratuma
3e0c7acd9c
Merge pull request #1135 from HyperNylium/master
Update playready section of get started guide
2025-11-20 23:49:27 +01:00
stratumadev
141b9f350a updated playready package 2025-11-20 23:48:54 +01:00
David Pivik
b18942753d
Revise conversion instructions for CDM blobs
Updated instructions for converting .prd files to blobs and clarified the process for using multi-downloader-nx.
2025-11-20 15:58:44 -05:00
stratumadev
26ef04a436 added automatic .prd file unpacking into bgroupcert and zgpriv 2025-11-20 21:41:31 +01:00
HyperNylium
cee1cbad80
Update version 2025-11-19 23:50:58 -05:00
HyperNylium
005ff557d0
Update playready section
Update for new playready client requirements
2025-11-19 23:49:38 -05:00
stratumadev
d0cc551b8d added new playready client
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-20 04:42:41 +01:00
Stratuma
51cb97e18f
Merge pull request #1134 from HyperNylium/master
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
Fix shaka-packager URL in get started guide
2025-11-19 22:58:52 +01:00
HyperNylium
79f17e1a35
Fix shaka URL
Forgot to update this one lol. whoops...
2025-11-19 16:47:25 -05:00
stratumadev
76769f42c8 Merge branch 'master' of https://github.com/anidl/multi-downloader-nx
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-19 15:12:45 +01:00
stratumadev
e521aae63b bumped version + Documentation 2025-11-19 14:13:03 +00:00
stratumadev
0276d9ca8b removed unused commander action handler 2025-11-19 15:12:44 +01:00
stratumadev
50ce84105e bumped version 2025-11-19 15:11:40 +01:00
stratumadev
a529740552 updated args logic and workflows
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-19 00:19:00 +01:00
stratumadev
b54c984bab disabled android builds
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-windows-arm64 (push) Blocked by required conditions
Style and build test / build-test-linux-arm64 (push) Blocked by required conditions
Style and build test / build-test-macos-arm64 (push) Blocked by required conditions
Style and build test / build-test-windows-x64 (push) Blocked by required conditions
Style and build test / build-test-linux-x64 (push) Blocked by required conditions
Style and build test / build-test-macos-x64 (push) Blocked by required conditions
2025-11-18 14:17:33 +01:00
stratumadev
84dc392180 updated test workflow 2025-11-18 13:58:53 +01:00
stratumadev
cd508c25c9 updated workflow node version 2025-11-18 13:44:35 +01:00
stratumadev
207ae3d820 updated gui packages and node version 2025-11-18 13:43:08 +01:00
stratumadev
0f0eaf1cfa updated packages 2025-11-18 13:29:05 +01:00
stratumadev
debbc2dd74 bumped version + Documentation
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-11-15 19:17:25 +00:00
stratumadev
bac75f1c0a bumped version 2025-11-15 20:16:04 +01:00
stratumadev
928b2bc95d fixed more rage errors if no image array crunchyroll 2025-11-15 20:14:30 +01:00
stratumadev
f90676c81e fixed rage error if no image array in episode object 2025-11-15 20:12:42 +01:00
stratumadev
c57290b331 updated packages
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
2025-11-15 02:09:27 +01:00
stratumadev
76282d5e09 fixed crunchyroll empty season error (new id switch)
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-11-12 16:54:10 +01:00
stratumadev
7650ad91ee updated packages 2025-11-12 16:31:41 +01:00
stratumadev
3c6a89f0df lint
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-11-08 23:30:43 +01:00
stratumadev
d275c24e9b updated packages 2025-11-08 23:28:57 +01:00
stratumadev
1667e5e482 Merge branch 'master' of https://github.com/anidl/multi-downloader-nx 2025-11-08 23:28:11 +01:00
stratumadev
bed0b6a918 fixed playready wrm header extraction from pssh 2025-11-08 23:28:09 +01:00
stratumadev
b715715b91 updated packages + bumped version + Documentation
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-11-04 12:04:39 +00:00
stratumadev
1a192d2192 updated packages + bumped version 2025-11-04 13:03:17 +01:00
stratumadev
e5b7d5d3d5 Normalize Russian ASS Style (thx to
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
PystoyPlayer)
2025-11-02 18:16:13 +01:00
stratumadev
61d062a846 updated crunchyroll UA and basic token 2025-11-02 18:12:04 +01:00
stratumadev
dc852b4210 updated packages
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
2025-11-01 21:13:18 +01:00
stratumadev
a9c558f02f updated packages
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
2025-10-31 21:12:19 +01:00
stratumadev
2d906cfffa updated packages
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-10-29 14:47:19 +01:00
stratumadev
dbb921077c updated crunchyroll header
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-10-26 02:00:53 +01:00
stratumadev
e599b60592 updated packages and bumped version + Documentation 2025-10-26 00:53:47 +00:00
stratumadev
6757015d0b updated packages and bumped version 2025-10-26 02:52:24 +02:00
Stratuma
8ff257f2e8
Merge pull request #1106 from HyperNylium/master
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
Add endpoint notes section to get started guide
2025-10-20 10:12:32 +02:00
HyperNylium
4ac1c2e02a
Update GET-STARTED.md 2025-10-19 16:02:17 -04:00
HyperNylium
8c8a74d225
Add 2 stream warning and re-word some things 2025-10-19 15:57:24 -04:00
HyperNylium
f7256d55fc
Update version 2025-10-19 14:56:49 -04:00
HyperNylium
7a9c295216
Add endpoint notes section 2025-10-19 14:56:08 -04:00
stratumadev
374227dfec Merge branch 'master' of https://github.com/anidl/multi-downloader-nx
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
2025-10-19 17:58:26 +02:00
stratumadev
b8be0f6d68 updated packages 2025-10-19 17:58:25 +02:00
stratumadev
8592336941 updated docs + Documentation 2025-10-19 15:54:47 +00:00
stratumadev
a5e63f6a44 updated docs 2025-10-19 17:53:35 +02:00
Stratuma
a5c8d9019c
Merge pull request #1105 from HyperNylium/master
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
Update get started guide's Playready section
2025-10-18 22:31:17 +02:00
HyperNylium
f2c358e59e
Include latest link 2025-10-18 11:43:22 -04:00
HyperNylium
327cd97330
oops 2025-10-18 11:42:29 -04:00
HyperNylium
100ec404de
Update get started guide's Playready section
Update the Playready part of the guide to include Stratuma's patched version of shaka-packager and where to download it.
2025-10-18 11:41:10 -04:00
Stratuma
e6e3e3ddbb
Update shaka-packager link in README.md
Some checks are pending
auto-documentation / documentation (push) Waiting to run
build and push docker image / build-node (push) Waiting to run
Style and build test / tsc (push) Waiting to run
Style and build test / eslint (push) Blocked by required conditions
Style and build test / prettier (push) Blocked by required conditions
Style and build test / build-test-arm64 (push) Blocked by required conditions
Style and build test / build-test-x64 (push) Blocked by required conditions
2025-10-18 14:03:40 +02:00
stratumadev
8342fe0d9e updated packages
Some checks failed
auto-documentation / documentation (push) Has been cancelled
build and push docker image / build-node (push) Has been cancelled
Style and build test / tsc (push) Has been cancelled
Style and build test / eslint (push) Has been cancelled
Style and build test / prettier (push) Has been cancelled
Style and build test / build-test-arm64 (push) Has been cancelled
Style and build test / build-test-x64 (push) Has been cancelled
2025-10-14 16:27:57 +02:00
stratumadev
ade23664b0 additional crunchyroll audio hardsub fix 2025-10-14 16:27:26 +02:00
stratumadev
9cec96a4b3 hotfix crunchyroll audio no hardsub playlist 2025-10-14 16:25:13 +02:00
stratumadev
d44189bdde removed socks from validator and docs + Documentation 2025-10-11 18:59:11 +00:00
stratumadev
05697caae8 removed socks from validator and docs 2025-10-11 20:58:07 +02:00
stratumadev
2f292b4363 readded docker building on push 2025-10-11 01:53:17 +02:00
stratumadev
e623e5d2f7 update workflow 2 2025-10-11 01:48:57 +02:00
stratumadev
5a29169162 updated release matrix 2025-10-11 01:47:53 +02:00
stratumadev
ee82fbf5c5 updated hidive episodes list console log 2025-10-11 00:39:32 +02:00
stratumadev
e59ffd7f24 updated cleanupFilename 2025-10-11 00:18:55 +02:00
Stratuma
aa8b93276d
Update README.md 2025-10-10 23:47:12 +02:00
stratumadev
a3593986af added dynamic bin-path.yml and gui fixes 2025-10-10 23:43:28 +02:00
stratumadev
77d2086df0 added skipMuxOnSubFail flag + Documentation 2025-10-10 20:49:30 +00:00
stratumadev
90931e44f4 added skipMuxOnSubFail flag 2025-10-10 22:48:27 +02:00
stratumadev
6847b36961 updated gui packages 2025-10-10 16:21:52 +02:00
stratumadev
9a3aacc8b6 added proxy support + Documentation 2025-10-10 13:58:58 +00:00
stratumadev
debc2876d6 added proxy support 2025-10-10 15:57:57 +02:00
stratumadev
6b3fd97f56 version bump + Documentation 2025-10-10 11:31:02 +00:00
stratumadev
b17a72f99a version bump 2025-10-10 13:30:00 +02:00
stratumadev
d58c14c159 updated docker release workflow 2025-10-10 11:20:46 +02:00
62 changed files with 3663 additions and 7900 deletions

6
.commitlintrc.json Normal file
View file

@ -0,0 +1,6 @@
{
"extends": ["@commitlint/config-conventional"],
"rules": {
"type-enum": [2, "always", ["ci", "chore", "docs", "ticket", "feat", "fix", "perf", "refactor", "revert", "style"]]
}
}

View file

@ -4,23 +4,26 @@ on:
push:
branches: [master]
permissions:
contents: write
jobs:
documentation:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- uses: pnpm/action-setup@v2
- uses: pnpm/action-setup@v4
with:
version: latest
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
- run: pnpm i
- run: pnpm run docs
- uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: ${{ github.event.head_commit.message }} + Documentation
commit_message: 'docs(main): bump version'

35
.github/workflows/docker.yml vendored Normal file
View file

@ -0,0 +1,35 @@
# This workflow will build a Node project with Docker
name: build and push docker image
on:
push:
branches: [master]
workflow_dispatch:
permissions:
contents: read
jobs:
build-node:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
if: ${{ github.ref == 'refs/heads/master' }}
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push Docker images
uses: docker/build-push-action@v2.9.0
with:
github-token: ${{ github.token }}
push: ${{ github.ref == 'refs/heads/master' }}
tags: |
"multidl/multi-downloader-nx:latest"
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View file

@ -4,6 +4,9 @@ on:
release:
types: [published]
permissions:
contents: write
jobs:
build:
strategy:
@ -52,10 +55,6 @@ jobs:
build_type: linux
build_arch: arm64
gui: cli
- os: ubuntu-latest
build_type: android
build_arch: armv7
gui: cli
- os: ubuntu-latest
build_type: linux
build_arch: x64
@ -64,10 +63,6 @@ jobs:
build_type: linux
build_arch: arm64
gui: gui
- os: ubuntu-latest
build_type: android
build_arch: armv7
gui: gui
# ALPINE
- os: ubuntu-latest
build_type: alpine
@ -88,14 +83,14 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: pnpm/action-setup@v2
uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- name: Install Node modules
run: |
@ -108,28 +103,17 @@ jobs:
echo PACKAGE_VERSION=$(node -p -e "require('./package.json').version") >> $GITHUB_ENV || exit 1
- name: Make build
run: pnpm run build-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}
- name: Upload release
uses: actions/upload-release-asset@v1
- name: Upload assets to the GitHub Release
uses: softprops/action-gh-release@v2
with:
upload_url: ${{ github.event.release.upload_url }}
asset_name: multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z
asset_path: ./lib/_builds/multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z
asset_content_type: application/x-7z-compressed
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SHA256 checksum
uses: actions/upload-release-asset@v1
with:
upload_url: ${{ github.event.release.upload_url }}
asset_name: multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z.sha256
asset_path: ./lib/_builds/multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z.sha256
asset_content_type: text/plain
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
files: |
./lib/_builds/multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z
./lib/_builds/multi-downloader-nx-${{ matrix.build_type }}-${{ matrix.build_arch }}-${{ matrix.gui }}.7z.sha256
fail_on_unmatched_files: true
build-docker:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1

View file

@ -6,18 +6,21 @@ on:
pull_request:
branches: [master]
permissions:
contents: read
jobs:
tsc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- run: pnpm i
- run: npx tsc
@ -25,14 +28,14 @@ jobs:
needs: tsc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run eslint
@ -40,44 +43,104 @@ jobs:
needs: tsc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run prettier
build-test-arm64:
build-test-windows-arm64:
needs: [eslint, prettier, tsc]
runs-on: windows-11-arm
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-windows-arm64
build-test-linux-arm64:
needs: [eslint, prettier, tsc]
runs-on: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-linux-arm64
build-test-macos-arm64:
needs: [eslint, prettier, tsc]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-arm64
build-test-x64:
- run: pnpm run test-macos-arm64
build-test-windows-x64:
needs: [eslint, prettier, tsc]
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-windows-x64
build-test-linux-x64:
needs: [eslint, prettier, tsc]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 22
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-x64
- run: pnpm run test-linux-x64
build-test-macos-x64:
needs: [eslint, prettier, tsc]
runs-on: macos-15-intel
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v2
with:
version: latest
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 24
check-latest: true
- run: pnpm i
- run: pnpm run test-macos-x64

1
.husky/commit-msg Normal file
View file

@ -0,0 +1 @@
npx commitlint --edit .git/COMMIT_EDITMSG

1
.husky/pre-commit Normal file
View file

@ -0,0 +1 @@
npx tsc

View file

@ -6,7 +6,6 @@ import { CrunchyVideoPlayStreams, CrunchyAudioPlayStreams } from './enums';
export type CrunchyDownloadOptions = {
hslang: string;
// kstream: number,
cstream: keyof typeof CrunchyVideoPlayStreams;
vstream: keyof typeof CrunchyVideoPlayStreams;
astream: keyof typeof CrunchyAudioPlayStreams;
@ -23,6 +22,8 @@ export type CrunchyDownloadOptions = {
waittime: number;
fsRetryTime: number;
dlsubs: string[];
subdlfailed?: boolean;
skipMuxOnSubFail: boolean;
skipsubs: boolean;
nosubs?: boolean;
mp4: boolean;
@ -51,6 +52,7 @@ export type CrunchyDownloadOptions = {
scaledBorderAndShadowFix: boolean;
scaledBorderAndShadow: 'yes' | 'no';
originalScriptFix: boolean;
subtitleTimestampFix: boolean;
};
export type CrunchyMultiDownload = {
@ -77,7 +79,7 @@ export type CrunchyMuxOptions = {
defaultAudio: LanguageItem;
ccTag: string;
syncTiming: boolean;
};
} & CrunchyDownloadOptions;
export type CrunchyEpMeta = {
data: {
@ -87,6 +89,7 @@ export type CrunchyEpMeta = {
versions?: EpisodeVersion[] | null;
isSubbed: boolean;
isDubbed: boolean;
durationMs: number;
}[];
seriesTitle: string;
seasonTitle: string;

View file

@ -1,49 +0,0 @@
declare module 'm3u8-parsed' {
export type M3U8 = {
allowCache: boolean;
discontinuityStarts: [];
segments: {
duration: number;
byterange?: {
length: number;
offset: number;
};
uri: string;
key: {
method: string;
uri: string;
};
timeline: number;
}[];
version: number;
mediaGroups: {
[type: string]: {
[index: string]: {
[language: string]: {
default: boolean;
autoselect: boolean;
language: string;
uri: string;
};
};
};
};
playlists: {
uri: string;
timeline: number;
attributes: {
'CLOSED-CAPTIONS': string;
AUDIO: string;
'FRAME-RATE': number;
RESOLUTION: {
width: number;
height: number;
};
CODECS: string;
'AVERAGE-BANDWIDTH': string;
BANDWIDTH: number;
};
}[];
};
export default function (data: string): M3U8;
}

15
TODO.md
View file

@ -1,15 +0,0 @@
# Todo/Future Ideas list
- [ ] Look into implementing wvd file support
- [ ] Merge sync branch with latest master
- [ ] Finish implementing old algorithm
- [ ] Look into adding suggested algorithm [#599](https://github.com/anidl/multi-downloader-nx/issues/599)
- [ ] Remove Funimation
- [ ] Remove old hidive API or find a way to make it work
- [ ] Look into adding other services
- [ ] Refactor downloading code
- [ ] Allow audio and video download at the same time
- [ ] Reduce/Refactor the amount of duplicate/boilerplate code required
- [ ] Create a generic service class for the CLI with set inputs/outputs
- [ ] Modularize site modules to ease addition of new sites
- [ ] Create generic MPD/M3U8 playlist downloader

65
adn.ts
View file

@ -3,11 +3,11 @@ import packageJson from './package.json';
// Node
import path from 'path';
import fs from 'fs-extra';
import fs from 'fs';
import crypto from 'crypto';
// Plugins
import m3u8 from 'm3u8-parsed';
import { Parser } from 'm3u8-parser';
// Modules
import * as fontsData from './modules/module.fontsData';
@ -18,7 +18,6 @@ import * as reqModule from './modules/module.fetch';
import Merger, { Font, MergerInput, SubtitleInput } from './modules/module.merger';
import streamdl from './modules/hls-download';
import { console } from './modules/log';
import { domain } from './modules/module.api-urls';
import { downloaded } from './modules/module.downloadArchive';
import parseSelect from './modules/module.parseSelect';
import parseFileName, { Variable } from './modules/module.filename';
@ -35,6 +34,7 @@ import { ADNVideo, ADNVideos } from './@types/adnVideos';
import { ADNPlayerConfig } from './@types/adnPlayerConfig';
import { ADNStreams } from './@types/adnStreams';
import { ADNSubtitles } from './@types/adnSubtitles';
import { FetchParams } from './modules/module.fetch';
export default class AnimationDigitalNetwork implements ServiceClass {
public cfg: yamlCfg.ConfigObject;
@ -58,7 +58,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
constructor(private debug = false) {
this.cfg = yamlCfg.loadCfg();
this.token = yamlCfg.loadADNToken();
this.req = new reqModule.Req(domain, debug, false, 'adn');
this.req = new reqModule.Req();
this.locale = 'fr';
}
@ -184,13 +184,14 @@ export default class AnimationDigitalNetwork implements ServiceClass {
password: data.password,
source: 'Web'
});
const authReqOpts: reqModule.Params = {
const authReqOpts: FetchParams = {
method: 'POST',
body: authData,
headers: {
'content-type': 'application/json',
'x-target-distribution': this.locale
}
},
useProxy: true
};
const authReq = await this.req.getData('https://gw.api.animationdigitalnetwork.com/authentication/login', authReqOpts);
if (!authReq.ok || !authReq.res) {
@ -212,7 +213,8 @@ export default class AnimationDigitalNetwork implements ServiceClass {
'content-type': 'application/json',
'x-target-distribution': this.locale
},
body: JSON.stringify({ refreshToken: this.token.refreshToken })
body: JSON.stringify({ refreshToken: this.token.refreshToken }),
useProxy: true
});
if (!authReq.ok || !authReq.res) {
console.error('Token refresh failed!');
@ -326,6 +328,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
this.cfg.bin = await yamlCfg.loadBinCfg();
let hasAudioStreams = false;
if (options.novids || data.filter((a) => a.type === 'Video').length === 0) return console.info('Skip muxing since no vids are downloaded');
if (options.subdlfailed && options.skipMuxOnSubFail) return console.info('Skip muxing since some subtitles failed to download');
if (data.some((a) => a.type === 'Audio')) {
hasAudioStreams = true;
}
@ -407,7 +410,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
const bin = Merger.checkMerger(this.cfg.bin, options.mp4, options.forceMuxer);
// collect fonts info
// mergers
let isMuxed = false;
let isMuxed: boolean = false;
if (options.syncTiming) {
await merger.createDelays();
}
@ -597,22 +600,32 @@ export default class AnimationDigitalNetwork implements ServiceClass {
dlFailed = true;
} else {
const streamPlaylistBody = await streamPlaylistsReq.res.text();
const streamPlaylists = m3u8(streamPlaylistBody);
// Init parser
const parser = new Parser();
// Parse M3U8
parser.push(streamPlaylistBody);
parser.end();
const streamPlaylists = parser.manifest;
if (!streamPlaylists) throw Error('Failed to parse M3U8');
const plServerList: string[] = [],
plStreams: Record<string, Record<string, string>> = {},
plQuality: {
str: string;
dim: string;
CODECS: string;
RESOLUTION: {
width: number;
height: number;
CODECS?: string;
RESOLUTION?: {
width?: number;
height?: number;
};
}[] = [];
for (const pl of streamPlaylists.playlists) {
for (const pl of streamPlaylists.playlists ?? []) {
// set quality
const plResolution = pl.attributes.RESOLUTION;
const plResolutionText = `${plResolution.width}x${plResolution.height}`;
const plResolutionText = `${plResolution?.width}x${plResolution?.height}`;
// set codecs
const plCodecs = pl.attributes.CODECS;
// parse uri
@ -639,7 +652,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
plStreams[plServer][plResolutionText] = pl.uri;
}
// set plQualityStr
const plBandwidth = Math.round(pl.attributes.BANDWIDTH / 1024);
const plBandwidth = Math.round(pl.attributes?.BANDWIDTH ?? 0 / 1024);
const qualityStrAdd = `${plResolutionText} (${plBandwidth}KiB/s)`;
const qualityStrRegx = new RegExp(qualityStrAdd.replace(/([:()/])/g, '\\$1'), 'm');
const qualityStrMatch = !plQuality
@ -688,12 +701,12 @@ export default class AnimationDigitalNetwork implements ServiceClass {
{
name: 'height',
type: 'number',
replaceWith: quality === 0 ? (plQuality[plQuality.length - 1].RESOLUTION.height as number) : plQuality[quality - 1].RESOLUTION.height
replaceWith: quality === 0 ? (plQuality[plQuality.length - 1].RESOLUTION?.height as number) : (plQuality[quality - 1].RESOLUTION?.height as number)
},
{
name: 'width',
type: 'number',
replaceWith: quality === 0 ? (plQuality[plQuality.length - 1].RESOLUTION.width as number) : plQuality[quality - 1].RESOLUTION.width
replaceWith: quality === 0 ? (plQuality[plQuality.length - 1].RESOLUTION?.width as number) : (plQuality[quality - 1].RESOLUTION?.width as number)
}
);
@ -709,7 +722,17 @@ export default class AnimationDigitalNetwork implements ServiceClass {
dlFailed = true;
} else {
const chunkPageBody = await chunkPage.res.text();
const chunkPlaylist = m3u8(chunkPageBody);
// Init parser
const parser = new Parser();
// Parse M3U8
parser.push(chunkPageBody);
parser.end();
const chunkPlaylist = parser.manifest;
if (!chunkPlaylist) throw Error('Failed to parse M3U8');
const totalParts = chunkPlaylist.segments.length;
const mathParts = Math.ceil(totalParts / options.partsize);
const mathMsg = `(${mathParts}*${options.partsize})`;
@ -817,12 +840,14 @@ export default class AnimationDigitalNetwork implements ServiceClass {
const subtitlesUrlReq = await this.req.getData(streams.links.subtitles.all);
if (!subtitlesUrlReq.ok || !subtitlesUrlReq.res) {
console.error('Subtitle location request failed!');
options.subdlfailed = true;
return undefined;
}
const subtitleUrl = (await subtitlesUrlReq.res.json()) as { location: string };
const encryptedSubtitlesReq = await this.req.getData(subtitleUrl.location);
if (!encryptedSubtitlesReq.ok || !encryptedSubtitlesReq.res) {
console.error('Subtitle request failed!');
options.subdlfailed = true;
return undefined;
}
const encryptedSubtitles = await encryptedSubtitlesReq.res.text();
@ -836,6 +861,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
const subtitles = JSON.parse(decryptedData) as ADNSubtitles;
if (Object.keys(subtitles).length === 0) {
console.warn('No subtitles found.');
options.subdlfailed = true;
}
for (const subName in subtitles) {
let subLang: langsData.LanguageItem;
@ -927,6 +953,7 @@ export default class AnimationDigitalNetwork implements ServiceClass {
}
} else {
console.warn("Couldn't find subtitles.");
options.subdlfailed = true;
}
} else {
console.info('Subtitles downloading skipped!');

1
commitlint.config.ts Normal file
View file

@ -0,0 +1 @@
export default { extends: ['@commitlint/config-conventional'] };

View file

@ -1,5 +1,4 @@
ffmpeg: 'ffmpeg.exe'
mkvmerge: 'mkvmerge.exe'
ffprobe: 'ffprobe.exe'
mp4decrypt: 'mp4decrypt.exe'
shaka: 'shaka-packager.exe'

File diff suppressed because it is too large Load diff

23
dev.js
View file

@ -1,23 +0,0 @@
const { exec } = require('child_process');
const path = require('path');
const toRun = process.argv.slice(2).join(' ').split('---');
const waitForProcess = async (proc) => {
return new Promise((resolve, reject) => {
proc.stdout?.on('data', (data) => process.stdout.write(data));
proc.stderr?.on('data', (data) => process.stderr.write(data));
proc.on('close', resolve);
proc.on('error', reject);
});
};
(async () => {
await waitForProcess(exec('pnpm run tsc test false'));
for (let command of toRun) {
await waitForProcess(
exec(`node index.js --service hidive ${command}`, {
cwd: path.join(__dirname, 'lib')
})
);
}
})();

View file

@ -1,200 +0,0 @@
## Change Log
This changelog is out of date and wont be continued. Please see the releases comments, or if not present the commit comments.
### 4.7.0 (unreleased)
- Change subtitles parser from ttml to vtt
- Improve help command
- Update modules
#### Known issues:
- Proxy not supported
### 4.6.1 (2020/09/19)
- Update modules
#### Known issues:
- Proxy not supported
### 4.6.0 (2020/06/03)
- Bug fixes and improvements
#### Known issues:
- Proxy not supported
### 4.5.1 (2020/03/10)
- Better binary files handling
- Binary build for windows
#### Known issues:
- Proxy not supported
### 4.5.0 (2020/01/21)
- Resume downloading
#### Known issues:
- Proxy not supported
### 4.4.2 (2019/07/21)
- Better proxy handling for stream download
### 4.4.1 (2019/07/21)
- Fixed proxy for stream download
### 4.4.0 (2019/06/04)
- Added `--novids` option (Thanks to @subdiox)
- Update modules
### 4.3.2 (2019/05/09)
- Code improvements
- Fix `hls-download` error printing
### 4.3.1 (2019/05/09)
- Fix auto detection max quality (Regression in d7d280c)
### 4.3.0 (2019/05/09)
- Better server selection (Closes #42)
### 4.2.1 (2019/05/04)
- Filter duplicate urls for cloudfront.net (Closes #40)
### 4.2.0 (2019/05/02)
- Replace `request` module with `got`
- Changed proxy cli options
- Changed `login` option name to `auth`
- Changed `hls-download` parallel download configuration from 5 parts to 10
- Update modules
### 4.1.0 (2019/04/05)
- CLI options for login moved to CUI
- Removed showing set token at startup
### 4.0.5 (2019/02/09)
- Fix downloading shows with autoselect max quality
### 4.0.4 (2019/01/26)
- Fix search when shows not found
- Update modules
### 4.0.3 (2018/12/06)
- Select only non-encrypted (HLS) streams, encrypted streams is MPEG-DASH
### 4.0.2 (2018/11/25)
- Fix typos and update modules
### 4.0.1 (2018/11/23)
- Code refactoring and small fixes
### 4.0.0 RC 1 (2018/11/17)
- Select range of episodes using hyphen-sequence
- Skip muxing if executables not found
- Fixed typos and duplicate options
### 4.0.0 Beta 2 (2018/11/12)
- Select alternative server
- Updated readme
### 4.0.0 Beta 1 (2018/11/10)
- Rearrange folders structure
- Configuration changed to yaml format
- Muxing changed to MKV by default
- tsMuxeR+mp4box replaced with FFMPEG
- Updated commands help and readme
- Fixed typos and duplicate options
- `ttml2srt` moved to separate module
- Drop `m3u8-stream-list` module
- Code improvements
### 3.2.8 (2018/06/16)
- Fix video request when token not specified
### 3.2.7 (2018/06/15)
- Update modules
### 3.2.6 (2018/02/18)
- Fix commands help
### 3.2.5 (2018/02/12)
- Fixes and update modules
### 3.2.4 (2018/02/01)
- Update modules
### 3.2.3 (2018/01/31)
- Rearrange folders structure
### 3.2.2 (2018/01/16)
- Update modules
### 3.2.1 (2018/01/16)
- Update modules
- Small fixes
### 3.2.0 (2018/01/16)
- `hls-download` module moved to independent module
- Auth for socks proxy
### 3.1.0 (2017/12/30)
- Convert DXFP (TTML) subtitles to SRT format
### 3.0.1 (2017/12/05)
- Check subtitles availability
- Download subtitles in SRT format instead of VTT
- Extended hls download progress info
### 3.0.0 Beta 3 (2017/12/03)
- Restored MKV and MP4 muxing
- Convert VTT subtitles to SRT format
### 3.0.0 Beta 2 (2017/10/18)
- Fix video downloading
### 3.0.0 Beta 1 (2017/10/17)
- Major code changes and improvements
- Drop Streamlink and added own module for hls download
### 2.5.0 (2017/09/04)
- `nosubs` option
- Request video with app api
### 2.4.1 (2017/09/02)
- Fixed typo in package.json
- Fix #11: URL for getting video stream url was changed
### 2.4.0 (2017/07/04)
- IPv4 Socks5 proxy support
### 2.3.3 (2017/06/19)
- Removed forgotten debug code
### 2.3.2 (2017/06/19)
- Fix #5: Script fails to multiplex unique file names
### 2.3.1 (2017/04/29)
- Code improvements
### 2.3.0 (2017/04/27)
- Code improvements
### 2.2.5 (2017/04/17)
- Minor code improvements and fixes
### 2.1.4 (2017/04/10)
- Minor changes
### 2.1.3 (2017/04/10)
- Minor changes and fixes
### 2.1.2 (2017/04/10)
- Fix config path
### 2.1.1 (2017/04/10)
- Minor text changes
- Fix config
- Minor changes
### 2.1.0 (2017/04/10)
- First stable release
### 2.0.0 Beta (lost in time)
- First public release

View file

@ -1,4 +1,4 @@
# multi-downloader-nx (v5.6.0)
# multi-downloader-nx (v5.6.9)
If you find any bugs in this documentation or in the program itself please report it [over on GitHub](https://github.com/anidl/multi-downloader-nx/issues).
@ -195,19 +195,19 @@ Select the server to use
| --- | --- | --- | --- | --- | --- | ---|
| Crunchyroll | `--cstream ${device}` | `string` | `No`| `--cs` | [`androidtv`, `android`, `androidtab`, `none`] | `NaN` |
(Please use --vstream and --astream instead, this will deprecate soon) Select a specific Crunchyroll playback endpoint by device. Since Crunchyroll has started rolling out their new VBR encodes, we highly recommend using a TV endpoint (e.g. vidaa, samsungtv, lgtv, rokutv, chromecast, firetv, androidtv) to access the old CBR encodes. Please note: The older encodes do not include the new 192 kbps audio, the new audio is only available with the new VBR encodes.
(Please use --vstream and --astream instead, this will deprecate soon)
#### `--vstream`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Choices** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | --- | ---|
| Crunchyroll | `--vstream ${device}` | `string` | `No`| `--vs` | [`androidtv`, `android`, `androidtab`, `none`] | `androidtv`| `vstream: ` |
Select a specific Crunchyroll video playback endpoint by device.
Select a specific Crunchyroll video playback endpoint by device. androidtv provides the best video (CBR).
#### `--astream`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Choices** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | --- | ---|
| Crunchyroll | `--astream ${device}` | `string` | `No`| `--as` | [`androidtv`, `android`, `androidtab`, `none`] | `android`| `astream: ` |
Select a specific Crunchyroll audio playback endpoint by device.
Select a specific Crunchyroll audio playback endpoint by device. android provides the best audio (192 kbps).
#### `--tsd`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
@ -227,6 +227,12 @@ Download video with specific hardsubs
Download subtitles by language tag (space-separated)
Crunchy Only: un, en, en-IN, es-419, es-ES, pt-BR, pt-PT, fr, de, ar, it, ru, tr, hi, zh-CN, zh-TW, zh-HK, ko, ca-ES, pl-PL, th-TH, ta-IN, ms-MY, vi-VN, id-ID, te-IN, ja
#### `--skipMuxOnSubFail`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
| All | `--skipMuxOnSubFail ` | `boolean` | `No`| `NaN` | `false`| `skipMuxOnSubFail: ` |
Skips muxing when a subtitle download fails.
#### `--noASSConv`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
@ -269,6 +275,12 @@ Select if ScaledBorderAndShadow should be set to "yes" or "no".
| Crunchyroll | `--originalScriptFix ` | `boolean` | `No`| `NaN` | `true`| `originalScriptFix: ` |
Removes the URL in the Original Script line of the ASS subtitles, it prevents from bricking the subs in VLC (Fonts not loading when url not returning 200).
#### `--subtitleTimestampFix`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
| Crunchyroll | `--subtitleTimestampFix ` | `boolean` | `No`| `NaN` | `false`| `subtitleTimestampFix: ` |
Fixes subtitle dialogues that go over the video length (deletes dialogues where start is over video length and updates the end timestamp when end is over video length).
#### `--novids`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **cli-default Entry**
| --- | --- | --- | --- | --- | ---|
@ -525,6 +537,18 @@ If true, the tool will output the raw data from the API (Where applicable, the f
| All | `--rawoutput ` | `string` | `No`| `NaN` | ``| `rawoutput: ` |
Provide a path to output the raw data from the API into a file (Where applicable, the feature is a WIP)
#### `--proxy`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
| All | `--proxy ${proxy_url}` | `string` | `No`| `NaN` | ``| `proxy: ` |
Uses Proxy on geo-restricted or geo-defining endpoints (e.g. https://127.0.0.1:1080 or http://127.0.0.1:1080)
#### `--proxyAll`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **Default** |**cli-default Entry**
| --- | --- | --- | --- | --- | --- | ---|
| All | `--proxyAll ` | `boolean` | `No`| `NaN` | `false`| `proxyAll: ` |
Proxies everything, not recommended. Proxy needs to be defined.
### Help
#### `--help`
| **Service** | **Usage** | **Type** | **Required** | **Alias** | **cli-default Entry**

View file

@ -1,4 +1,4 @@
# multi-downloader-nx (v5.5.3)
# multi-downloader-nx (v5.6.5)
If you find any bugs in this documentation or in the program itself please report it [over on GitHub](https://github.com/anidl/multi-downloader-nx/issues).
@ -23,6 +23,7 @@ This tool is not responsible for your actions; please make an informed decision
- [Playready CDM](#playready)
- [Installation](#installation)
- [Configuration](#configuration)
- [Endpoint Notes](#endpoint-notes)
- [Usage](#usage)
- [Authentication](#authentication)
- [Output Directory](#output-directory)
@ -38,7 +39,7 @@ This tool is not responsible for your actions; please make an informed decision
The dependencies for this application are:
- [ffmpeg](https://www.videohelp.com/software/ffmpeg)
- [MKVToolNix](https://www.videohelp.com/software/MKVToolNix)
- Either [Bento4-SDK (mp4decrypt)](https://www.bento4.com/downloads/) or [shaka-packager](https://github.com/shaka-project/shaka-packager/releases)
- Either [Bento4-SDK (mp4decrypt)](https://www.bento4.com/downloads/) or [shaka-packager](https://github.com/stratumadev/shaka-packager/releases/latest)
For ffmpeg, I chose this option from the website: \
![ffmpeg download](./imgs/ffmpeg.png)
@ -125,7 +126,6 @@ C:.
12. Great! Now we have all dependencies installed and available in our PATH. To confirm that everything is working, open a new Command Prompt window and run the following commands:
```
ffmpeg
ffprobe
mkvmerge
mp4decrypt (or shaka-packager's .exe name, if you chose that instead)
```
@ -140,24 +140,32 @@ You have now completed the dependencies installation!
### Widevine
When you dump your CDM key, you will usually get 2 files. One ending in `.bin` and the other in `.pem`. \
All you need to do is place both files in the `widevine` folder, which is in the same directory you opened `aniDL.exe` from. \
It will detect what each file is based on the file contents.
If you do want to name them though (optional):
- The `.bin` file should be named `device_client_id_blob.bin` or `client_id.bin`
- The `.pem` file should be named `device_private_key.pem` or `private_key.pem`
Again, the renaming is totally optional. Just make sure both files are in the `widevine` folder.
If you have a Widevine CDM key dump, it's either going to be a single `.wvd` file or a pair of `.bin` and `.pem` files. \
In any case, multi-downloader-nx supports both formats. Place them in the `widevine` folder and you are good to go.
### Playready
If you have a Playready CDM key dump, you just need to make sure:
1. Its provisioned as a V3 Device by [pyplayready](https://github.com/ready-dl/pyplayready).
2. Security level is either SL2000 or SL3000
3. Make sure you are using shaka-packager v2.6.1, as later versions have issues.
1. Security level is either SL2000 or SL3000
2. Make sure you are using the latest version of shaka-packager from Stratuma, as he has patched it to work with multi-downloader-nx.\
You can find his releases [here](https://github.com/stratumadev/shaka-packager/releases/latest)
After you have confirmed the above, place the file(s) in the `playready` folder, which is in the same directory you opened `aniDL.exe` from.
File type does not matter, as multi-downloader-nx supports both `.prd` device files and the `bgroupcert.dat` and `zgpriv.dat` blobs. \
`.prd` files can be placed into the `playready` folder with whatever name it has.
But if you are using the 2 `.dat` blob files, you need to rename them like so:
- `.dat` file that is 1.xx KiB -> `bgroupcert.dat`
- `.dat` file that is 32 bytes -> `zgpriv.dat`
Output from [mediainfo](https://mediaarea.net/en/MediaInfo) can help you identify which file is which.
```
bgroupcert.dat
1.26 KiB
zgpriv.dat
32.0 Bytes
```
Keep in mind that the `bgroupcert.dat` may not always be exactly 1.26 KiB but it should be in the KiB range, while the `zgpriv.dat` will always be 32 bytes.
## Installation
@ -213,6 +221,25 @@ If you wanted to set `--tsd` to `true`, you would do it like this:
tsd: true
```
## Endpoint Notes
This section explains what each endpoint is capable of, and what subscription level is required to use it. \
If you are new to the project, please use the defaults found in the [`cli-defaults.yml`](https://github.com/HyperNylium/multi-downloader-nx/blob/master/config/cli-defaults.yml) file, as those are the recommended settings.
| Endpoint | Video quality | Audio quality | Subscription level required |
|--------------|-----------------------------|---------------|-----------------------------|
| `android` | 4-6k variable bitrate (VBR) | 192kbps | "Fan" or higher |
| `androidtab` | 4-6k variable bitrate (VBR) | 128kbps | "Fan" or higher |
| `androidtv` | 8k constant bitrate (CBR) | 128kbps | "Fan" or higher |
> [!NOTE]
> If you pick 192kbps audio (`--astream android`) with 8k CBR video (`--vstream androidtv`) but don't have the "Mega Fan" sub or higher,
> the audio will fall back to 128 kbps, so you will get the CBR video with 128 kbps audio.
> [!NOTE]
> 192 kbps audio comes from the `android` endpoint. CBR video comes from `androidtv` endpoint. \
> Using both means you are using **two streams**, which needs the "Mega Fan" tier or higher.
## Usage
### Authentication
@ -279,4 +306,4 @@ If you wanted to download the show via the ID, you would enter `G4PH0WXVJ` in th
#### Filename Overwrite
1. **Filename Overwrite**: This is where you can change the filename format for the downloaded episodes. [`DOCUMENTATION.md`](https://github.com/anidl/multi-downloader-nx/blob/master/docs/DOCUMENTATION.md#--filename) for all available options.
1. **Filename Overwrite**: This is where you can change the filename format for the downloaded episodes. [`DOCUMENTATION.md`](https://github.com/anidl/multi-downloader-nx/blob/master/docs/DOCUMENTATION.md#--filename) for all available options.

View file

@ -17,11 +17,10 @@ This application is not endorsed by or affiliated with *Crunchyroll*, *Hidive* o
By default this application uses the following paths to programs (main executables):
* `ffmpeg.exe` (From PATH)
* `ffprobe.exe` (From PATH)
* `mkvmerge.exe` (From PATH)
* `mp4decrypt.exe` (From PATH) (or shaka-packager)
* `shaka-packager.exe` (v2.6.1 or older) (From PATH) (or mp4decrypt)
* `ffmpeg.exe` (Windows) or `ffmpeg` (other) (From PATH)
* `mkvmerge.exe` (Windows) or `mkvmerge` (other) (From PATH)
* `mp4decrypt.exe` (Windows) or `mp4decrypt` (other) (From PATH) (or shaka-packager)
* `shaka-packager.exe` (Windows) or `shaka-packager` (other) (From PATH) (or mp4decrypt)
To change these paths you need to edit `bin-path.yml` in `./config/` directory.
@ -102,7 +101,7 @@ If you want to package the application, run pnpm run build-`{platform}`-`{type}`
### Decryption Requirements
* mp4decrypt >= Any (http://www.bento4.com/) - Only required for decrypting (or shaka-packager)
* shaka-packager >= Any (https://github.com/shaka-project/shaka-packager/releases) - Only required for decrypting (or mp4decrypt)
* shaka-packager >= Any (https://github.com/stratumadev/shaka-packager/releases) - Only required for decrypting (or mp4decrypt)
### Instructions (Widevine)
@ -111,5 +110,4 @@ In order to decrypt DRM content, you will need to have a dumped CDM, after that
### Instructions (Playready)
Playready CDMs are very easy to obtain, you can find them even on Github.
Place the CDM in the `./playready/` directory and you're all set!
**IMPORTANT**: The Playready CDM (SL2000/SL3000) needs to be provisioned as a **V3 Device** by pyplayready (https://github.com/ready-dl/pyplayready).
Place the CDM files (bgroupcert.dat and zgpriv.dat) in the `./playready/` directory and you're all set!

View file

@ -33,7 +33,7 @@ export default tseslint.config(
}
},
{
ignores: ['**/lib', '**/videos', '**/build', 'dev.js', 'tsc.ts']
ignores: ['**/lib', '**/videos', '**/build', 'tsc.ts']
},
{
files: ['gui/react/**/*'],

View file

@ -4,34 +4,34 @@
"private": true,
"dependencies": {
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.0",
"@mui/icons-material": "^7.1.2",
"@emotion/styled": "^11.14.1",
"@mui/icons-material": "^7.3.5",
"@mui/lab": "7.0.0-beta.12",
"@mui/material": "^7.1.2",
"concurrently": "^9.2.0",
"@mui/material": "^7.3.5",
"notistack": "^3.0.2",
"react": "^19.1.0",
"react-dom": "^19.1.0",
"typescript": "^5.8.3",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"uuid": "^11.1.0",
"ws": "^8.18.2"
"ws": "^8.18.3"
},
"devDependencies": {
"@babel/cli": "^7.27.2",
"@babel/core": "^7.27.4",
"@babel/preset-env": "^7.27.2",
"@babel/preset-react": "^7.27.1",
"@babel/preset-typescript": "^7.27.1",
"@types/node": "^22.15.32",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",
"@babel/cli": "^7.28.3",
"@babel/core": "^7.28.5",
"@babel/preset-env": "^7.28.5",
"@babel/preset-react": "^7.28.5",
"@babel/preset-typescript": "^7.28.5",
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@types/uuid": "^10.0.0",
"babel-loader": "^10.0.0",
"concurrently": "^9.2.1",
"css-loader": "^7.1.2",
"html-webpack-plugin": "^5.6.3",
"html-webpack-plugin": "^5.6.5",
"style-loader": "^4.0.0",
"ts-node": "^10.9.2",
"webpack": "^5.99.9",
"typescript": "^5.9.3",
"webpack": "^5.103.0",
"webpack-cli": "^6.0.1",
"webpack-dev-server": "^5.2.2"
},

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1 @@
dangerouslyAllowAllBuilds: true

View file

@ -1,5 +1,5 @@
import React from 'react';
import { Container, Box, ThemeProvider, createTheme, Theme } from '@mui/material';
import { Box, ThemeProvider, createTheme, Theme } from '@mui/material';
const makeTheme = (mode: 'dark' | 'light'): Partial<Theme> => {
return createTheme({

View file

@ -1,5 +1,5 @@
import { Add } from '@mui/icons-material';
import { Box, Button, Dialog, Divider, Typography } from '@mui/material';
import { Box, Button, Dialog, Divider } from '@mui/material';
import React from 'react';
import DownloadSelector from './DownloadSelector/DownloadSelector';
import EpisodeListing from './DownloadSelector/Listing/EpisodeListing';

View file

@ -1,4 +1,4 @@
import React, { ChangeEvent } from 'react';
import React from 'react';
import { Box, Button, Divider, FormControl, InputBase, InputLabel, Link, MenuItem, Select, TextField, Tooltip, Typography } from '@mui/material';
import useStore from '../../../hooks/useStore';
import MultiSelect from '../../reusable/MultiSelect';

View file

@ -27,17 +27,15 @@ const EpisodeListing: React.FC = () => {
}, [store.episodeListing]);
const close = () => {
dispatch({
type: 'episodeListing',
payload: []
});
const mergedEpisodes = [...parseEpisodes(store.downloadOptions.e), ...selected];
dispatch({
type: 'downloadOptions',
payload: {
...store.downloadOptions,
e: `${[...new Set([...parseSelect(store.downloadOptions.e), ...selected])].join(',')}`
e: serializeEpisodes(mergedEpisodes)
}
});
dispatch({ type: 'episodeListing', payload: [] });
};
const getEpisodesForSeason = (season: string | 'all') => {
@ -168,6 +166,16 @@ const EpisodeListing: React.FC = () => {
</Dialog>
);
};
/**
 * Splits a comma-separated episode string into trimmed, non-empty tokens.
 * An empty (or falsy) input yields an empty list.
 */
const parseEpisodes = (e: string): string[] => {
  if (!e) return [];
  const tokens: string[] = [];
  for (const part of e.split(',')) {
    const trimmed = part.trim();
    // Skip blanks so stray commas (",," or a leading comma) produce no entries.
    if (trimmed.length > 0) {
      tokens.push(trimmed);
    }
  }
  return tokens;
};
/**
 * Joins episode tokens into a comma-separated string, dropping duplicates
 * (first occurrence wins, insertion order preserved).
 */
const serializeEpisodes = (episodes: string[]): string => {
  const unique = new Set(episodes);
  return Array.from(unique).join(',');
};
const parseSelect = (s: string): string[] => {
const ret: string[] = [];

View file

@ -1,4 +1,4 @@
import { Badge, Box, Button, CircularProgress, Divider, IconButton, LinearProgress, Skeleton, Tooltip, Typography } from '@mui/material';
import { Box, CircularProgress, IconButton, LinearProgress, Skeleton, Tooltip, Typography } from '@mui/material';
import React from 'react';
import { messageChannelContext } from '../../../provider/MessageChannel';
import { queueContext } from '../../../provider/QueueProvider';

View file

@ -1,5 +1,5 @@
import React from 'react';
import { Divider, Box, Button, Typography, Avatar } from '@mui/material';
import { Box, Button, Typography, Avatar } from '@mui/material';
import useStore from '../hooks/useStore';
import { StoreState } from './Store';

View file

@ -1,8 +1,7 @@
import express from 'express';
import { ensureConfig, loadCfg, workingDir } from '../../modules/module.cfg-loader';
import cors from 'cors';
import ServiceHandler from './serviceHandler';
import open from 'open';
import ServiceHandler from './serviceHandler';
import path from 'path';
import { PublicWebSocket } from './websocket';
import { console } from '../../modules/log';
@ -19,7 +18,14 @@ const app = express();
export { app, cfg };
app.use(express.json());
app.use(cors());
app.use((_, res, next) => {
res.header('Access-Control-Allow-Origin', '*');
res.header('Access-Control-Allow-Methods', 'GET,POST,PUT,DELETE,OPTIONS');
res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
next();
});
app.use(express.static(path.join(workingDir, 'gui', 'server', 'build'), { maxAge: 1000 * 60 * 20 }));
console.info(`\n=== Multi Downloader NX GUI ${packageJson.version} ===\n`);

View file

@ -1,6 +1,6 @@
// build-in
import path from 'path';
import fs from 'fs-extra';
import fs from 'fs';
// package program
import packageJson from './package.json';
@ -19,7 +19,7 @@ import vtt2ass from './modules/module.vtt2ass';
import Helper from './modules/module.helper';
// load req
import { domain, api } from './modules/module.api-urls';
import { api } from './modules/module.api-urls';
import * as reqModule from './modules/module.fetch';
import { DownloadedMedia } from './@types/hidiveTypes';
import parseFileName, { Variable } from './modules/module.filename';
@ -36,7 +36,8 @@ import { NewHidiveEpisode } from './@types/newHidiveEpisode';
import { NewHidivePlayback, Subtitle } from './@types/newHidivePlayback';
import { MPDParsed, parse } from './modules/module.transform-mpd';
import { canDecrypt, getKeysWVD, cdm, getKeysPRD } from './modules/cdm';
import { KeyContainer } from './modules/widevine/license';
import { FetchParams } from './modules/module.fetch';
import { KeyContainer } from 'widevine';
export default class Hidive implements ServiceClass {
public cfg: yamlCfg.ConfigObject;
@ -46,7 +47,7 @@ export default class Hidive implements ServiceClass {
constructor(private debug = false) {
this.cfg = yamlCfg.loadCfg();
this.token = yamlCfg.loadNewHDToken();
this.req = new reqModule.Req(domain, debug, false, 'hd');
this.req = new reqModule.Req();
}
public async cli() {
@ -127,7 +128,7 @@ export default class Hidive implements ServiceClass {
method: method as 'GET' | 'POST',
url: (api.hd_new_api + endpoint) as string,
body: body,
useProxy: true
useProxy: false
};
// get request type
const isGet = method == 'GET';
@ -139,8 +140,10 @@ export default class Hidive implements ServiceClass {
options.headers['Authorization'] = authHeader;
} else if (authType == 'auth') {
options.headers['Authorization'] = `Bearer ${this.token.authorisationToken}`;
options.useProxy = true;
} else if (authType == 'refresh') {
options.headers['Authorization'] = `Bearer ${this.token.refreshToken}`;
options.useProxy = true;
} else if (authType == 'both') {
options.headers['Authorization'] = `Mixed ${this.token.authorisationToken} ${this.token.refreshToken}`;
}
@ -148,10 +151,11 @@ export default class Hidive implements ServiceClass {
console.debug('[DEBUG] Request params:');
console.debug(options);
}
const apiReqOpts: reqModule.Params = {
const apiReqOpts: FetchParams = {
method: options.method,
headers: options.headers as Record<string, string>,
body: options.body as string
body: options.body as string,
useProxy: options.useProxy
};
let apiReq = await this.req.getData(options.url, apiReqOpts);
if (!apiReq.ok || !apiReq.res) {
@ -499,9 +503,10 @@ export default class Hidive implements ServiceClass {
selMark = '✓ ';
}
console.info(
'%s[%s] %s',
'%s[%s] [%s] %s',
selMark,
'S' + parseFloat(showData[i].episodeInformation.seasonNumber + '') + 'E' + parseFloat(showData[i].episodeInformation.episodeNumber + ''),
'E.' + (showData[i].id ?? 0),
showData[i].title
);
}
@ -929,7 +934,7 @@ export default class Hidive implements ServiceClass {
} else {
console.info('Decryption done for video');
if (!options.nocleanup) {
fs.removeSync(`${tempTsFile}.video.enc.m4s`);
fs.unlinkSync(`${tempTsFile}.video.enc.m4s`);
}
fs.copyFileSync(`${tempTsFile}.video.m4s`, `${tsFile}.video.m4s`);
fs.unlinkSync(`${tempTsFile}.video.m4s`);
@ -1023,7 +1028,7 @@ export default class Hidive implements ServiceClass {
return undefined;
} else {
if (!options.nocleanup) {
fs.removeSync(`${tempTsFile}.audio.enc.m4s`);
fs.unlinkSync(`${tempTsFile}.audio.enc.m4s`);
}
fs.copyFileSync(`${tempTsFile}.audio.m4s`, `${tsFile}.audio.m4s`);
fs.unlinkSync(`${tempTsFile}.audio.m4s`);
@ -1099,12 +1104,14 @@ export default class Hidive implements ServiceClass {
});
} else {
console.warn(`Failed to download subtitle: ${sxData.file}`);
options.subdlfailed = true;
}
}
subIndex++;
}
} else {
console.warn("Can't find urls for subtitles!");
options.subdlfailed = true;
}
} else {
console.info('Subtitles downloading skipped!');
@ -1121,6 +1128,7 @@ export default class Hidive implements ServiceClass {
this.cfg.bin = await yamlCfg.loadBinCfg();
let hasAudioStreams = false;
if (options.novids || data.filter((a) => a.type === 'Video').length === 0) return console.info('Skip muxing since no vids are downloaded');
if (options.subdlfailed && options.skipMuxOnSubFail) return console.info('Skip muxing since some subtitles failed to download');
if (data.some((a) => a.type === 'Audio')) {
hasAudioStreams = true;
}

View file

@ -1,11 +1,19 @@
import { console } from './modules/log';
import { ServiceClass } from './@types/serviceClassInterface';
import { appArgv, overrideArguments } from './modules/module.app-args';
import * as yamlCfg from './modules/module.cfg-loader';
import { makeCommand, addToArchive } from './modules/module.downloadArchive';
import Crunchy from './crunchy';
import Hidive from './hidive';
import ADN from './adn';
import update from './modules/module.updater';
const SERVICES: Record<string, any> = {
crunchy: Crunchy,
hidive: Hidive,
adn: ADN
};
(async () => {
const cfg = yamlCfg.loadCfg();
const argv = appArgv(cfg.cli);
@ -40,47 +48,27 @@ import update from './modules/module.updater';
);
console.info('Added %s to the downloadArchive list', argv.s === undefined ? argv.series : argv.s);
}
} else if (argv.downloadArchive) {
} else if (argv.downloadArchive && argv.service) {
const ids = makeCommand(argv.service);
for (const id of ids) {
overrideArguments(cfg.cli, id);
/* Reimport module to override appArgv */
Object.keys(require.cache).forEach((key) => {
if (key.endsWith('crunchy.js') || key.endsWith('hidive.js')) delete require.cache[key];
});
let service: ServiceClass;
switch (argv.service) {
case 'crunchy':
service = new (await import('./crunchy')).default();
break;
case 'hidive':
service = new (await import('./hidive')).default();
break;
case 'adn':
service = new (await import('./adn')).default();
break;
default:
service = new (await import(`./${argv.service}`)).default();
break;
const Service = SERVICES[argv.service];
if (!Service) {
console.error('Unknown service:', argv.service);
process.exit(1);
}
const service = new Service();
await service.cli();
}
} else {
let service: ServiceClass;
switch (argv.service) {
case 'crunchy':
service = new (await import('./crunchy')).default();
break;
case 'hidive':
service = new (await import('./hidive')).default();
break;
case 'adn':
service = new (await import('./adn')).default();
break;
default:
service = new (await import(`./${argv.service}`)).default();
break;
} else if (argv.service) {
const Service = SERVICES[argv.service];
if (!Service) {
console.error('Unknown service:', argv.service);
process.exit(1);
}
const service = new Service();
await service.cli();
}
})();

View file

@ -1,6 +1,6 @@
// build requirements
import crypto from 'crypto';
import fs from 'fs-extra';
import fs from 'fs';
import pkg from '../package.json';
import modulesCleanup from 'removeNPMAbsolutePaths';
import { exec } from '@yao-pkg/pkg';
@ -10,7 +10,7 @@ import esbuild from 'esbuild';
import path from 'path';
const buildsDir = './_builds';
const nodeVer = 'node22-';
const nodeVer = 'node24-';
type BuildTypes = `${'windows' | 'macos' | 'linux' | 'linuxstatic' | 'alpine'}-${'x64' | 'arm64'}` | 'linuxstatic-armv7';
@ -38,14 +38,14 @@ async function buildBinary(buildType: BuildTypes, gui: boolean) {
}
await modulesCleanup('.');
if (!fs.existsSync(buildsDir)) {
fs.mkdirSync(buildsDir);
fs.mkdirSync(buildsDir, { recursive: true });
}
const buildFull = `${buildStr}-${getFriendlyName(buildType)}-${gui ? 'gui' : 'cli'}`;
const buildDir = `${buildsDir}/${buildFull}`;
if (fs.existsSync(buildDir)) {
fs.removeSync(buildDir);
fs.rmSync(buildDir, { recursive: true, force: true });
}
fs.mkdirSync(buildDir);
fs.mkdirSync(buildDir, { recursive: true });
console.info('Running esbuild');
const build = await esbuild.build({
@ -79,26 +79,43 @@ async function buildBinary(buildType: BuildTypes, gui: boolean) {
}
// Moving required default files/folders into build dir
fs.mkdirSync(`${buildDir}/config`);
fs.mkdirSync(`${buildDir}/videos`);
fs.mkdirSync(`${buildDir}/widevine`);
fs.mkdirSync(`${buildDir}/playready`);
fs.copySync('./config/bin-path.yml', `${buildDir}/config/bin-path.yml`);
fs.copySync('./config/cli-defaults.yml', `${buildDir}/config/cli-defaults.yml`);
fs.copySync('./config/dir-path.yml', `${buildDir}/config/dir-path.yml`);
fs.copySync('./config/gui.yml', `${buildDir}/config/gui.yml`);
fs.copySync('./modules/cmd-here.bat', `${buildDir}/cmd-here.bat`);
fs.copySync('./modules/NotoSans-Regular.ttf', `${buildDir}/NotoSans-Regular.ttf`);
fs.copySync('./package.json', `${buildDir}/package.json`);
fs.copySync('./docs/', `${buildDir}/docs/`);
fs.copySync('./LICENSE.md', `${buildDir}/docs/LICENSE.md`);
fs.mkdirSync(`${buildDir}/config`, { recursive: true });
fs.mkdirSync(`${buildDir}/videos`, { recursive: true });
fs.mkdirSync(`${buildDir}/widevine`, { recursive: true });
fs.mkdirSync(`${buildDir}/playready`, { recursive: true });
fs.copyFileSync('./config/cli-defaults.yml', `${buildDir}/config/cli-defaults.yml`);
fs.copyFileSync('./config/dir-path.yml', `${buildDir}/config/dir-path.yml`);
fs.copyFileSync('./config/gui.yml', `${buildDir}/config/gui.yml`);
fs.copyFileSync('./modules/cmd-here.bat', `${buildDir}/cmd-here.bat`);
fs.copyFileSync('./modules/NotoSans-Regular.ttf', `${buildDir}/NotoSans-Regular.ttf`);
fs.copyFileSync('./package.json', `${buildDir}/package.json`);
fs.cpSync('./docs/', `${buildDir}/docs/`, { recursive: true });
fs.copyFileSync('./LICENSE.md', `${buildDir}/docs/LICENSE.md`);
if (gui) {
fs.copySync('./gui', `${buildDir}/gui`);
fs.copySync('./node_modules/open/xdg-open', `${buildDir}/xdg-open`);
fs.cpSync('./gui', `${buildDir}/gui`, { recursive: true });
fs.cpSync('./node_modules/open/xdg-open', `${buildDir}/xdg-open`, { recursive: true });
}
if (fs.existsSync(`${buildsDir}/${buildFull}.7z`)) {
fs.removeSync(`${buildsDir}/${buildFull}.7z`);
fs.unlinkSync(`${buildsDir}/${buildFull}.7z`);
}
// Generate bin-path.yml
const ext = buildType.startsWith('windows') ? '.exe' : '';
const binConf = {
ffmpeg: `ffmpeg${ext}`,
mkvmerge: `mkvmerge${ext}`,
mp4decrypt: `mp4decrypt${ext}`,
shaka: `shaka-packager${ext}`
};
fs.writeFileSync(
`${buildDir}/config/bin-path.yml`,
Object.entries(binConf)
.map(([key, value]) => `${key}: '${value}'`)
.join('\n') + '\n'
);
console.info(`[Build] Build completed`);
// Zipping

View file

@ -2,85 +2,105 @@ import fs from 'fs';
import { console } from './log';
import { workingDir } from './module.cfg-loader';
import path from 'path';
import { Device } from './playready/device';
import Cdm from './playready/cdm';
import { PSSH } from './playready/pssh';
import { KeyContainer, Session } from './widevine/license';
import { ofetch } from 'ofetch';
import * as reqModule from './module.fetch';
import Playready from 'node-playready';
import Widevine, { KeyContainer, LicenseType } from 'widevine';
const req = new reqModule.Req();
//read cdm files located in the same directory
let privateKey: Buffer = Buffer.from([]),
identifierBlob: Buffer = Buffer.from([]),
prd: Buffer = Buffer.from([]),
prd_cdm: Cdm | undefined;
let widevine: Widevine | undefined, playready: Playready | undefined;
export let cdm: 'widevine' | 'playready';
export let canDecrypt: boolean;
try {
const files_prd = fs.readdirSync(path.join(workingDir, 'playready'));
const prd_file_found = files_prd.find((f) => f.includes('.prd'));
const bgroup_file_found = files_prd.find((f) => f.includes('bgroupcert'));
const zgpriv_file_found = files_prd.find((f) => f.includes('zgpriv'));
const prd_file_found = files_prd.find((f) => f.endsWith('.prd'));
try {
if (prd_file_found) {
const file_prd = path.join(workingDir, 'playready', prd_file_found);
const stats = fs.statSync(file_prd);
if (stats.size < 1024 * 8 && stats.isFile()) {
const fileContents = fs.readFileSync(file_prd, {
encoding: 'utf8'
});
if (fileContents.includes('CERT')) {
prd = fs.readFileSync(file_prd);
const device = Device.loads(prd);
prd_cdm = Cdm.fromDevice(device);
}
const file_bgroup = path.join(workingDir, 'playready', 'bgroupcert.dat');
const file_zgpriv = path.join(workingDir, 'playready', 'zgpriv.dat');
if (bgroup_file_found && zgpriv_file_found) {
const bgroup_stats = fs.statSync(file_bgroup);
const zgpriv_stats = fs.statSync(file_zgpriv);
// Zgpriv is always 32 bytes long
if (bgroup_stats.isFile() && zgpriv_stats.isFile() && zgpriv_stats.size === 32) {
const bgroup = fs.readFileSync(file_bgroup);
const zgpriv = fs.readFileSync(file_zgpriv);
// Init Playready Client
playready = Playready.init(bgroup, zgpriv);
}
} else if (prd_file_found) {
const file_prd = path.join(workingDir, 'playready', prd_file_found);
const prd = fs.readFileSync(file_prd);
// Init Playready Client with PRD file
playready = Playready.initPRD(prd);
}
} catch (e) {
console.error('Error loading Playready CDM, ensure the CDM is provisioned as a V3 Device and not malformed. For more informations read the readme.');
prd = Buffer.from([]);
console.error('Error loading Playready CDM. For more informations read the readme.');
console.error(e);
}
const files_wvd = fs.readdirSync(path.join(workingDir, 'widevine'));
try {
let identifierBlob: Buffer = Buffer.from([]);
let privateKey: Buffer = Buffer.from([]);
let wvd: Buffer = Buffer.from([]);
// Searching files for client id blob and private key
files_wvd.forEach(function (file) {
file = path.join(workingDir, 'widevine', file);
const stats = fs.statSync(file);
if (stats.size < 1024 * 8 && stats.isFile()) {
const fileContents = fs.readFileSync(file, { encoding: 'utf8' });
// Handle client id blob
if (fileContents.includes('widevine_cdm_version') && fileContents.includes('oem_crypto_security_patch_level') && !fileContents.startsWith('WVD')) {
identifierBlob = fs.readFileSync(file);
}
// Handle private key
if (
(fileContents.includes('-----BEGIN RSA PRIVATE KEY-----') && fileContents.includes('-----END RSA PRIVATE KEY-----')) ||
(fileContents.includes('-----BEGIN PRIVATE KEY-----') && fileContents.includes('-----END PRIVATE KEY-----'))
) {
privateKey = fs.readFileSync(file);
}
if (fileContents.includes('widevine_cdm_version') && fileContents.includes('oem_crypto_security_patch_level') && !fileContents.startsWith('WVD')) {
identifierBlob = fs.readFileSync(file);
}
// Handle WVD file
if (fileContents.startsWith('WVD')) {
console.warn(
'Found WVD file in folder, AniDL currently only supports device_client_id_blob and device_private_key, make sure to have them in the widevine folder.'
);
wvd = fs.readFileSync(file);
}
}
});
// Error if no client blob but private key
if (identifierBlob.length === 0 && privateKey.length !== 0 && wvd.length === 0) {
console.error('Widevine initialization failed, found private key but not the client id blob!');
}
// Error if no private key but client blob
if (identifierBlob.length !== 0 && privateKey.length === 0 && wvd.length === 0) {
console.error('Widevine initialization failed, found client id blob but not the private key!');
}
// Init Widevine Client
if (identifierBlob.length !== 0 && privateKey.length !== 0) {
widevine = Widevine.init(identifierBlob, privateKey);
} else if (wvd.length !== 0) {
widevine = Widevine.initWVD(wvd);
}
} catch (e) {
console.error('Error loading Widevine CDM, malformed client blob or private key.');
privateKey = Buffer.from([]);
identifierBlob = Buffer.from([]);
}
if (privateKey.length !== 0 && identifierBlob.length !== 0) {
if (widevine) {
cdm = 'widevine';
canDecrypt = true;
} else if (prd.length !== 0) {
} else if (playready) {
cdm = 'playready';
canDecrypt = true;
} else if (privateKey.length === 0 && identifierBlob.length !== 0) {
console.warn('Private key missing');
canDecrypt = false;
} else if (identifierBlob.length === 0 && privateKey.length !== 0) {
console.warn('Identifier blob missing');
canDecrypt = false;
} else if (prd.length == 0) {
canDecrypt = false;
} else {
canDecrypt = false;
}
@ -90,101 +110,64 @@ try {
}
export async function getKeysWVD(pssh: string | undefined, licenseServer: string, authData: Record<string, string>): Promise<KeyContainer[]> {
if (!pssh || !canDecrypt) return [];
//pssh found in the mpd manifest
if (!pssh || !canDecrypt || !widevine) return [];
// pssh found in the mpd manifest
const psshBuffer = Buffer.from(pssh, 'base64');
//Create a new widevine session
const session = new Session({ privateKey, identifierBlob }, psshBuffer);
// Create a new widevine session
const session = widevine.createSession(psshBuffer, LicenseType.STREAMING);
//Generate license
const data = await ofetch(licenseServer, {
// Request License
const licReq = await req.getData(licenseServer, {
method: 'POST',
body: session.createLicenseRequest(),
headers: authData,
responseType: 'arrayBuffer'
}).catch((error) => {
if (error.status && error.statusText) {
console.error(`${error.name} ${error.status}: ${error.statusText}`);
} else {
console.error(`${error.name}: ${error.message}`);
}
if (!error.data) return;
const data = error.data instanceof ArrayBuffer ? new TextDecoder().decode(error.data) : error.data;
if (data) {
const docTitle = data.match(/<title>(.*)<\/title>/);
if (docTitle) {
console.error(docTitle[1]);
}
if (error.status && error.status != 404 && error.status != 403) {
console.error('Body:', data);
}
}
body: session.generateChallenge(),
headers: authData
});
if (data) {
//Parse License and return keys
const text = new TextDecoder().decode(data);
try {
const json = JSON.parse(text);
return session.parseLicense(Buffer.from(json['license'], 'base64')) as KeyContainer[];
} catch {
return session.parseLicense(Buffer.from(new Uint8Array(data))) as KeyContainer[];
}
} else {
console.error('License request failed');
if (!licReq.ok || !licReq.res) {
console.error('License fetch Failed!');
return [];
}
const lic = await licReq.res.arrayBuffer();
const lictext = new TextDecoder().decode(lic);
try {
const json = JSON.parse(lictext);
return session.parseLicense(Buffer.from(json['license'], 'base64')) as KeyContainer[];
} catch {
return session.parseLicense(Buffer.from(new Uint8Array(lic))) as KeyContainer[];
}
}
export async function getKeysPRD(pssh: string | undefined, licenseServer: string, authData: Record<string, string>): Promise<KeyContainer[]> {
if (!pssh || !canDecrypt || !prd_cdm) return [];
const pssh_parsed = new PSSH(pssh);
if (!pssh || !canDecrypt || !playready) return [];
//Create a new playready session
const session = prd_cdm.getLicenseChallenge(pssh_parsed.get_wrm_headers(true)[0]);
// Generate Playready challenge
const session = playready.generateChallenge(pssh);
//Generate license
const data = await ofetch(licenseServer, {
// Fetch license
const licReq = await req.getData(licenseServer, {
method: 'POST',
body: session,
headers: authData,
responseType: 'text'
}).catch((error) => {
if (error && error.status && error.statusText) {
console.error(`${error.name} ${error.status}: ${error.statusText}`);
} else {
console.error(`${error.name}: ${error.message}`);
}
if (!error.data) return;
const docTitle = error.data.match(/<title>(.*)<\/title>/);
if (docTitle) {
console.error(docTitle[1]);
}
if (error.status && error.status != 404 && error.status != 403) {
console.error('Body:', error.data);
}
headers: authData
});
if (data) {
//Parse License and return keys
try {
const keys = prd_cdm.parseLicense(data);
if (!licReq.ok || !licReq.res) {
console.error('License fetch Failed!');
return [];
}
return keys.map((k) => {
return {
kid: k.key_id,
key: k.key
};
});
} catch {
console.error('License parsing failed');
return [];
}
} else {
console.error('License request failed');
// Parse License and return keys
try {
const keys = playready.parseLicense(Buffer.from(await licReq.res.text(), 'utf-8'));
return keys.map((k) => {
return {
kid: k.kid,
key: k.key
};
});
} catch {
console.error('License parsing failed');
return [];
}
}

View file

@ -6,8 +6,11 @@ import url from 'url';
import { console } from './log';
import { ProgressData } from '../@types/messageHandler';
import { ofetch } from 'ofetch';
import Helper from './module.helper';
import * as reqModule from './module.fetch';
import { Manifest } from 'm3u8-parser';
const req = new reqModule.Req();
export type HLSCallback = (data: ProgressData) => unknown;
@ -31,7 +34,7 @@ type Key = {
};
export type HLSOptions = {
m3u8json: M3U8Json;
m3u8json: M3U8Json | Partial<Manifest>;
output?: string;
threads?: number;
retries?: number;
@ -50,7 +53,7 @@ type Data = {
total: number;
completed: number;
};
m3u8json: M3U8Json;
m3u8json: M3U8Json | Partial<Manifest>;
outputFile: string;
threads: number;
retries: number;
@ -88,7 +91,7 @@ class hlsDownload {
m3u8json: options.m3u8json,
outputFile: options.output || 'stream.ts',
threads: options.threads || 5,
retries: options.retries || 4,
retries: options.retries || 10,
offset: options.offset || 0,
baseurl: options.baseurl,
skipInit: options.skipInit,
@ -116,7 +119,7 @@ class hlsDownload {
if (age < 24 * 60 * 60 * 1000) {
console.info('Resume data found! Trying to resume...');
const resumeData = JSON.parse(await fs.readFile(`${fn}.resume`, 'utf-8'));
if (resumeData.total == this.data.m3u8json.segments.length && resumeData.completed != resumeData.total && !isNaN(resumeData.completed)) {
if (resumeData.total == this.data.m3u8json.segments?.length && resumeData.completed != resumeData.total && !isNaN(resumeData.completed)) {
console.info('Resume data is ok!');
this.data.offset = resumeData.completed;
this.data.isResume = true;
@ -124,7 +127,7 @@ class hlsDownload {
console.warn(' Resume data is wrong!');
console.warn({
resume: { total: resumeData.total, dled: resumeData.completed },
current: { total: this.data.m3u8json.segments.length }
current: { total: this.data.m3u8json.segments?.length }
});
}
} else {
@ -164,7 +167,7 @@ class hlsDownload {
this.data.dateStart = Date.now();
let segments = this.data.m3u8json.segments;
// download init part
if (segments[0].map && this.data.offset === 0 && !this.data.skipInit) {
if (segments?.[0].map && this.data.offset === 0 && !this.data.skipInit) {
console.info('Download and save init part...');
const initSeg = segments[0].map as Segment;
if (segments[0].key) {
@ -177,7 +180,7 @@ class hlsDownload {
`${fn}.resume`,
JSON.stringify({
completed: 0,
total: this.data.m3u8json.segments.length
total: this.data.m3u8json.segments?.length
})
);
console.info('Init part downloaded.');
@ -185,17 +188,17 @@ class hlsDownload {
console.error(`Part init download error:\n\t${e.message}`);
return { ok: false, parts: this.data.parts };
}
} else if (segments[0].map && this.data.offset === 0 && this.data.skipInit) {
} else if (segments?.[0].map && this.data.offset === 0 && this.data.skipInit) {
console.warn('Skipping init part can lead to broken video!');
}
// resuming ...
if (this.data.offset > 0) {
segments = segments.slice(this.data.offset);
segments = segments?.slice(this.data.offset);
console.info(`Resuming download from part ${this.data.offset + 1}...`);
this.data.parts.completed = this.data.offset;
}
// dl process
for (let p = 0; p < segments.length / this.data.threads; p++) {
for (let p = 0; p < (segments?.length ?? 0) / this.data.threads; p++) {
// set offsets
const offset = p * this.data.threads;
const dlOffset = offset + this.data.threads;
@ -204,9 +207,9 @@ class hlsDownload {
prq = new Map();
const res: any[] = [];
let errcnt = 0;
for (let px = offset; px < dlOffset && px < segments.length; px++) {
const curp = segments[px];
const key = curp.key as Key;
for (let px = offset; px < dlOffset && px < (segments?.length ?? 0); px++) {
const curp = segments?.[px];
const key = curp?.key as Key;
if (key && !krq.has(key.uri) && !this.data.keys[key.uri as string]) {
krq.set(key.uri, this.downloadKey(key, px, this.data.offset));
}
@ -217,8 +220,8 @@ class hlsDownload {
console.error(`Key ${er.p + 1} download error:\n\t${er.message}`);
return { ok: false, parts: this.data.parts };
}
for (let px = offset; px < dlOffset && px < segments.length; px++) {
const curp = segments[px] as Segment;
for (let px = offset; px < dlOffset && px < (segments?.length ?? 0); px++) {
const curp = segments?.[px] as Segment;
prq.set(px, () => this.downloadPart(curp, px, this.data.offset));
}
// Parallelized part download with retry logic and optional concurrency limit
@ -284,7 +287,7 @@ class hlsDownload {
}
}
// log downloaded
const totalSeg = segments.length + this.data.offset; // Add the sliced lenght back so the resume data will be correct even if an resumed download fails
const totalSeg = (segments?.length ?? 0) + this.data.offset; // Add the sliced lenght back so the resume data will be correct even if an resumed download fails
const downloadedSeg = dlOffset < totalSeg ? dlOffset : totalSeg;
this.data.parts.completed = downloadedSeg + this.data.offset;
const data = extFn.getDownloadInfo(this.data.dateStart, downloadedSeg, totalSeg, this.data.bytesDownloaded);
@ -343,6 +346,7 @@ class hlsDownload {
segOffset,
false
);
if (!part) throw Error();
// if (this.data.checkPartLength) {
// this.data.checkPartLength = false;
// console.warn(`Part ${segIndex + segOffset + 1}: can't check parts size!`);
@ -419,18 +423,20 @@ const extFn = {
const buffer = await fs.readFile(url.fileURLToPath(uri));
return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
}
// do request
return await ofetch(uri, {
const partReq = await req.getData(uri, {
method: 'GET',
headers: headers,
responseType: 'arrayBuffer',
retry: 0,
async onRequestError({ error }) {
const partType = isKey ? 'Key' : 'Part';
const partIndx = partIndex + 1 + segOffset;
console.warn(`%s %s: ${error.message}`, partType, partIndx);
}
headers: headers
});
if (!partReq.res || !partReq.ok) {
const partType = isKey ? 'Key' : 'Part';
const partIndx = partIndex + 1 + segOffset;
console.warn(`%s %s: ${partReq.error?.res?.statusText}`, partType, partIndx);
return;
}
return await partReq.res.arrayBuffer();
}
};

View file

@ -7,7 +7,7 @@ const logFolder = path.join(workingDir, 'logs');
const latest = path.join(logFolder, 'latest.log');
const makeLogFolder = () => {
if (!fs.existsSync(logFolder)) fs.mkdirSync(logFolder);
if (!fs.existsSync(logFolder)) fs.mkdirSync(logFolder, { recursive: true });
if (fs.existsSync(latest)) {
const stats = fs.statSync(latest);
fs.renameSync(latest, path.join(logFolder, `${stats.mtimeMs}.log`));

View file

@ -1,7 +1,9 @@
// api domains
const domain = {
cr_www: 'https://www.crunchyroll.com',
cr_api: 'https://api.crunchyroll.com',
cr_api: 'https://beta-api.crunchyroll.com',
cr_playback: 'https://cr-play-service.prd.crunchyrollsvc.com',
cr_license: 'https://cr-license-proxy.prd.crunchyrollsvc.com',
hd_www: 'https://www.hidive.com',
hd_api: 'https://api.hidive.com',
hd_new: 'https://dce-frontoffice.imggaming.com'
@ -28,8 +30,9 @@ export type APIType = {
cms_auth: string;
// Crunchyroll Headers
crunchyDefUserAgent: string;
crunchyDefHeader: Record<string, string>;
crunchyDefHeader: Record<string, any>;
crunchyAuthHeader: Record<string, string>;
crunchyAuthRefreshHeader: Record<string, string>;
// Hidive
hd_apikey: string;
hd_devName: string;
@ -50,27 +53,28 @@ const api: APIType = {
bundlejs: 'https://static.crunchyroll.com/vilos-v2/web/vilos/js/bundle.js',
//
// Crunchyroll API
basic_auth_token: 'ZGsxYndzemRyc3lkeTR1N2xvenE6bDl0SU1BdTlzTGc4ZjA4ajlfQkQ4eWZmQmZTSms0R0o=',
auth: `${domain.cr_www}/auth/v1/token`,
me: `${domain.cr_www}/accounts/v1/me`,
profile: `${domain.cr_www}/accounts/v1/me/profile`,
search: `${domain.cr_www}/content/v2/discover/search`,
content_cms: `${domain.cr_www}/content/v2/cms`,
content_music: `${domain.cr_www}/content/v2/music`,
browse: `${domain.cr_www}/content/v1/browse`,
browse_all_series: `${domain.cr_www}/content/v2/discover/browse`,
streaming_sessions: `${domain.cr_www}/playback/v1/sessions/streaming`,
drm_widevine: `${domain.cr_www}/license/v1/license/widevine`,
drm_playready: `${domain.cr_www}/license/v1/license/playReady`,
basic_auth_token: 'b2g0cnYxbHpsOXR5ZzF4b2NqZ2o6cDI4bEhwM3J1ZVV0ek1aNDRhZmNyam84MUNmaFZGemg=',
auth: `${domain.cr_api}/auth/v1/token`,
me: `${domain.cr_api}/accounts/v1/me`,
profile: `${domain.cr_api}/accounts/v1/me/profile`,
search: `${domain.cr_api}/content/v2/discover/search`,
content_cms: `${domain.cr_api}/content/v2/cms`,
content_music: `${domain.cr_api}/content/v2/music`,
browse: `${domain.cr_api}/content/v1/browse`,
browse_all_series: `${domain.cr_api}/content/v2/discover/browse`,
streaming_sessions: `${domain.cr_playback}/v1/sessions/streaming`,
drm_widevine: `https://cr-license-proxy.prd.crunchyrollsvc.com/v1/license/widevine`,
drm_playready: `https://cr-license-proxy.prd.crunchyrollsvc.com/v1/license/playReady`,
//
// Crunchyroll Bucket
cms_bucket: `${domain.cr_www}/cms/v2`,
cms_auth: `${domain.cr_www}/index/v2`,
cms_bucket: `${domain.cr_api}/cms/v2`,
cms_auth: `${domain.cr_api}/index/v2`,
//
// Crunchyroll Headers
crunchyDefUserAgent: 'Crunchyroll/ANDROIDTV/3.47.0_22277 (Android 12; en-US; SHIELD Android TV Build/SR1A.211012.001)',
crunchyDefUserAgent: 'Crunchyroll/ANDROIDTV/3.53.1_22290 (Android 12; en-US; SHIELD Android TV Build/SR1A.211012.001)',
crunchyDefHeader: {},
crunchyAuthHeader: {},
crunchyAuthRefreshHeader: {},
//
//
// Hidive
@ -89,7 +93,6 @@ const api: APIType = {
api.crunchyDefHeader = {
'User-Agent': api.crunchyDefUserAgent,
Accept: '*/*',
'Accept-Encoding': 'gzip',
Connection: 'Keep-Alive',
Host: 'www.crunchyroll.com'
@ -97,10 +100,19 @@ api.crunchyDefHeader = {
// set header
api.crunchyAuthHeader = {
Authorization: `Basic ${api.basic_auth_token}`,
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
Accept: 'application/json',
'Accept-Charset': 'UTF-8',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Request-Type': 'SignIn',
...api.crunchyDefHeader
};
// set header
api.crunchyAuthRefreshHeader = {
Accept: 'application/json',
'Accept-Charset': 'UTF-8',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
...api.crunchyDefHeader
};
export { domain, api };

View file

@ -1,4 +1,4 @@
import yargs, { Choices } from 'yargs';
import { Command } from 'commander';
import { args, AvailableMuxer, groups } from './module.args';
import { LanguageItem } from './module.langsData';
import { DownloadInfo } from '../@types/messageHandler';
@ -8,7 +8,7 @@ import { console } from './log';
import { CrunchyVideoPlayStreams, CrunchyAudioPlayStreams } from '../@types/enums';
import pj from '../package.json';
let argvC: {
export let argvC: {
[x: string]: unknown;
ccTag: string;
defaultAudio: LanguageItem;
@ -32,13 +32,13 @@ let argvC: {
auth: boolean | undefined;
dlFonts: boolean | undefined;
search: string | undefined;
'search-type': string;
searchType: string;
page: number | undefined;
locale: string;
new: boolean | undefined;
'movie-listing': string | undefined;
'show-raw': string | undefined;
'season-raw': string | undefined;
movieListing: string | undefined;
showRaw: string | undefined;
seasonRaw: string | undefined;
series: string | undefined;
s: string | undefined;
srz: string | undefined;
@ -46,7 +46,6 @@ let argvC: {
extid: string | undefined;
q: number;
x: number;
// kstream: number;
cstream: keyof typeof CrunchyVideoPlayStreams;
vstream: keyof typeof CrunchyVideoPlayStreams;
astream: keyof typeof CrunchyAudioPlayStreams;
@ -54,6 +53,7 @@ let argvC: {
partsize: number;
hslang: string;
dlsubs: string[];
skipMuxOnSubFail: boolean;
novids: boolean | undefined;
noaudio: boolean | undefined;
nosubs: boolean | undefined;
@ -82,7 +82,6 @@ let argvC: {
$0: string;
dlVideoOnce: boolean;
chapters: boolean;
// crapi: 'android' | 'web';
removeBumpers: boolean;
originalFontSize: boolean;
keepAllVideos: boolean;
@ -96,10 +95,51 @@ let argvC: {
scaledBorderAndShadowFix: boolean;
scaledBorderAndShadow: 'yes' | 'no';
originalScriptFix: boolean;
subtitleTimestampFix: boolean;
// Proxy
proxy: string;
proxyAll: boolean;
};
export type ArgvType = typeof argvC;
// This functions manages slight mismatches like -srz and returns it as --srz
const processArgv = () => {
const argv = [];
const arrayFlags = args.filter((a) => a.type === 'array').map((a) => `--${a.name}`);
for (let i = 0; i < process.argv.length; i++) {
const arg = process.argv[i];
if (/^-[a-zA-Z]{2,}$/.test(arg)) {
const found = args.find((a) => a.name === arg.substring(1) || a.alias === arg.substring(1));
if (found) {
argv.push(`--${found.name}`);
continue;
}
}
if (arrayFlags.includes(arg)) {
const col = [];
let n = i + 1;
while (n < process.argv.length && !process.argv[n].startsWith('-')) {
col.push(process.argv[n]);
n++;
}
argv.push(arg);
argv.push(col.join(' '));
i = n - 1;
continue;
}
argv.push(arg);
}
return argv;
};
const appArgv = (
cfg: {
[key: string]: unknown;
@ -107,41 +147,91 @@ const appArgv = (
isGUI = false
) => {
if (argvC) return argvC;
yargs(process.argv.slice(2));
const argv = getArgv(cfg, isGUI).parseSync();
argvC = argv;
return argv;
const argv = getCommander(cfg, isGUI).parse(processArgv());
const parsed = argv.opts() as ArgvType;
// Be sure that both vars (name and alias) are defined
for (const item of args) {
const name = item.name;
const alias = item.alias;
if (!alias) continue;
if (parsed[name] !== undefined) {
parsed[alias] = parsed[name];
}
if (parsed[alias] !== undefined) {
parsed[name] = parsed[alias];
}
}
if (!isGUI && (process.argv.length <= 2 || parsed.help)) {
argv.outputHelp();
process.exit(0);
}
argvC = parsed;
return parsed;
};
const overrideArguments = (cfg: { [key: string]: unknown }, override: Partial<typeof argvC>, isGUI = false) => {
const argv = getArgv(cfg, isGUI)
.middleware((ar) => {
for (const key of Object.keys(override)) {
ar[key] = override[key];
}
})
.parseSync();
argvC = argv;
const argv = getCommander(cfg, isGUI);
const baseArgv = [...processArgv()];
for (const [key, val] of Object.entries(override)) {
if (val === undefined) continue;
if (typeof val === 'boolean') {
if (val) baseArgv.push(key.length > 1 ? `--${key}` : `-${key}`);
} else {
baseArgv.push(key.length > 1 ? `--${key}` : `-${key}`, String(val));
}
}
const data = argv.parse(baseArgv);
const parsed = data.opts() as ArgvType;
// Be sure that both vars (name and alias) are defined
for (const item of args) {
const name = item.name;
const alias = item.alias;
if (!alias) continue;
if (parsed[name] !== undefined) {
parsed[alias] = parsed[name];
}
if (parsed[alias] !== undefined) {
parsed[name] = parsed[alias];
}
}
if (!isGUI && (process.argv.length <= 2 || parsed.help)) {
argv.outputHelp();
process.exit(0);
}
argvC = parsed;
};
export { appArgv, overrideArguments };
const getArgv = (cfg: { [key: string]: unknown }, isGUI: boolean) => {
const getCommander = (cfg: Record<string, unknown>, isGUI: boolean) => {
const program = new Command();
program
.name(process.platform === 'win32' ? 'aniDL.exe' : 'aniDL')
.description(pj.description)
.version(pj.version, '-v, --version', 'Show version')
.allowUnknownOption(false)
.allowExcessArguments(true);
const parseDefault = <T = unknown>(key: string, _default: T): T => {
if (Object.prototype.hasOwnProperty.call(cfg, key)) {
return cfg[key] as T;
} else return _default;
};
const argv = yargs
.parserConfiguration({
'duplicate-arguments-array': false,
'camel-case-expansion': false
})
.wrap(yargs.terminalWidth())
.usage('Usage: $0 [options]')
.version(pj.version)
.help(true);
//.strictOptions()
const data = args.map((a) => {
return {
...a,
@ -150,40 +240,93 @@ const getArgv = (cfg: { [key: string]: unknown }, isGUI: boolean) => {
default: typeof a.default === 'object' && !Array.isArray(a.default) ? parseDefault((a.default as any).name || a.name, (a.default as any).default) : a.default
};
});
for (const item of data)
argv.option(item.name, {
...item,
coerce: (value) => {
if (item.transformer) {
return item.transformer(value);
} else {
return value;
for (const item of data) {
const option = program.createOption(
(item.alias
? `${item.alias.length === 1 ? `-${item.alias}` : `--${item.alias}`}, ${item.name.length === 1 ? `-${item.name}` : `--${item.name}`}`
: item.name.length === 1
? `-${item.name}`
: `--${item.name}`) + (item.type === 'boolean' ? '' : ` <value>`),
item.describe ?? ''
);
if (item.default !== undefined) option.default(item.transformer ? item.transformer(item.default) : item.default);
const optionNames = [...args.map((a) => `--${a.name}`), ...args.map((a) => (a.alias ? `-${a.alias}` : null)).filter(Boolean)];
option.argParser((value) => {
if (item.transformer) return item.transformer(value);
// Prevent from passing other options als value for option
if (value && typeof value === 'string' && value.startsWith('-') && optionNames.includes(value)) return undefined;
if (item.type === 'boolean') {
if (value === undefined) return true;
if (value === 'true') return true;
if (value === 'false') return false;
return Boolean(value);
}
if (item.type === 'array') {
if (typeof value === 'string' && value.includes(',')) {
return value.split(',').map((v) => v.trim());
}
},
choices: item.name === 'service' && isGUI ? undefined : (item.choices as unknown as Choices)
if (typeof value === 'string' && value.includes(' ')) {
return value.split(' ').map((v) => v.trim());
}
return Array.isArray(value) ? value : [value];
}
if (item.type === 'number') {
const num = Number(value);
return Number.isFinite(num) ? num : 0;
}
if (item.type === 'string') {
if (value === undefined) return undefined;
return value;
}
if (item.choices && !(isGUI && item.name === 'service')) {
if (!item.choices.includes(value)) {
console.error(`Invalid value '${value}' for --${item.name}. Allowed: ${item.choices.join(', ')}`);
process.exit(1);
}
}
return value;
});
// Custom logic for suggesting corrections for misspelled options
argv.middleware((argv: Record<string, any>) => {
// List of valid options
const validOptions = [...args.map((a) => a.name), ...(args.map((a) => a.alias).filter((alias) => alias !== undefined) as string[])];
const unknownOptions = Object.keys(argv).filter((key) => !validOptions.includes(key) && key !== '_' && key !== '$0'); // Filter out known options
program.addOption(option);
}
const suggestedOptions: Record<string, boolean> = {};
unknownOptions.forEach((actualOption) => {
const closestOption = validOptions.find((option) => {
const levenVal = leven(option, actualOption);
return levenVal <= 2 && levenVal > 0;
// Custom logic for suggesting corrections for misspelled options
program.hook('preAction', (_, command) => {
const used = command.parent?.args || [];
const validOptions = [...args.map((a) => a.name), ...args.map((a) => a.alias).filter((a): a is string => a !== undefined)];
const unknownOptions = used.filter((arg) => arg.startsWith('-'));
const suggestions: Record<string, boolean> = {};
unknownOptions.forEach((opt) => {
const cleaned = opt.replace(/^-+/, '');
const closest = validOptions.find((vo) => {
const dist = leven(vo, cleaned);
return dist <= 2 && dist > 0;
});
if (closestOption && !suggestedOptions[closestOption]) {
suggestedOptions[closestOption] = true;
console.info(`Unknown option ${actualOption}, did you mean ${closestOption}?`);
} else if (!suggestedOptions[actualOption]) {
suggestedOptions[actualOption] = true;
console.info(`Unknown option ${actualOption}`);
if (closest && !suggestions[closest]) {
console.info(`Unknown option ${opt}, did you mean --${closest}?`);
suggestions[closest] = true;
} else if (!suggestions[cleaned]) {
console.info(`Unknown option ${opt}`);
suggestions[cleaned] = true;
}
});
});
return argv as unknown as yargs.Argv<typeof argvC>;
return program;
};

View file

@ -295,8 +295,7 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
alias: 'cs',
service: ['crunchy'],
type: 'string',
describe:
'(Please use --vstream and --astream instead, this will deprecate soon) Select a specific Crunchyroll playback endpoint by device. Since Crunchyroll has started rolling out their new VBR encodes, we highly recommend using a TV endpoint (e.g. vidaa, samsungtv, lgtv, rokutv, chromecast, firetv, androidtv) to access the old CBR encodes. Please note: The older encodes do not include the new 192 kbps audio, the new audio is only available with the new VBR encodes.',
describe: '(Please use --vstream and --astream instead, this will deprecate soon)',
choices: [...Object.keys(CrunchyVideoPlayStreams), 'none'],
docDescribe: true,
usage: '${device}'
@ -307,7 +306,7 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
alias: 'vs',
service: ['crunchy'],
type: 'string',
describe: 'Select a specific Crunchyroll video playback endpoint by device.',
describe: 'Select a specific Crunchyroll video playback endpoint by device. androidtv provides the best video (CBR).',
choices: [...Object.keys(CrunchyVideoPlayStreams), 'none'],
default: {
default: 'androidtv'
@ -321,7 +320,7 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
alias: 'as',
service: ['crunchy'],
type: 'string',
describe: 'Select a specific Crunchyroll audio playback endpoint by device.',
describe: 'Select a specific Crunchyroll audio playback endpoint by device. android provides the best audio (192 kbps).',
choices: [...Object.keys(CrunchyAudioPlayStreams), 'none'],
default: {
default: 'android'
@ -372,6 +371,18 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
},
usage: '${sub1} ${sub2}'
},
{
name: 'skipMuxOnSubFail',
group: 'dl',
describe: 'Skips muxing when a subtitle download fails.',
docDescribe: true,
service: ['all'],
type: 'boolean',
usage: '',
default: {
default: false
}
},
{
name: 'noASSConv',
group: 'dl',
@ -457,6 +468,19 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
default: true
}
},
{
name: 'subtitleTimestampFix',
group: 'dl',
describe:
'Fixes subtitle dialogues that go over the video length (deletes dialogues where start is over video length and updates the end timestamp when end is over video length).',
docDescribe: true,
service: ['crunchy'],
type: 'boolean',
usage: '',
default: {
default: false
}
},
{
name: 'novids',
group: 'dl',
@ -1002,6 +1026,30 @@ const args: TAppArg<boolean | number | string | unknown[]>[] = [
default: {
default: 'cc'
}
},
{
name: 'proxy',
describe: 'Uses Proxy on geo-restricted or geo-defining endpoints (e.g. https://127.0.0.1:1080 or http://127.0.0.1:1080)',
docDescribe: true,
group: 'util',
service: ['all'],
type: 'string',
usage: '${proxy_url}',
default: {
default: ''
}
},
{
name: 'proxyAll',
describe: 'Proxies everything, not recommended. Proxy needs to be defined.',
docDescribe: true,
group: 'util',
service: ['all'],
type: 'boolean',
usage: '',
default: {
default: false
}
}
];

View file

@ -1,6 +1,6 @@
import path from 'path';
import yaml from 'yaml';
import fs from 'fs-extra';
import fs from 'fs';
import { lookpath } from 'lookpath';
import { console } from './log';
import { GuiState } from '../@types/messageHandler';
@ -37,7 +37,7 @@ const tokenFile = {
};
export const ensureConfig = () => {
if (!fs.existsSync(path.join(workingDir, 'config'))) fs.mkdirSync(path.join(workingDir, 'config'));
if (!fs.existsSync(path.join(workingDir, 'config'))) fs.mkdirSync(path.join(workingDir, 'config'), { recursive: true });
if (process.env.contentDirectory)
[binCfgFile, dirCfgFile, cliCfgFile, guiCfgFile].forEach((a) => {
if (!fs.existsSync(`${a}.yml`)) fs.copyFileSync(path.join(__dirname, '..', 'config', `${path.basename(a)}.yml`), `${a}.yml`);
@ -66,7 +66,7 @@ export type WriteObjects = {
const writeYamlCfgFile = <T extends keyof WriteObjects>(file: T, data: WriteObjects[T]) => {
const fn = path.join(workingDir, 'config', `${file}.yml`);
if (fs.existsSync(fn)) fs.removeSync(fn);
if (fs.existsSync(fn)) fs.unlinkSync(fn);
fs.writeFileSync(fn, yaml.stringify(data));
};
@ -85,7 +85,6 @@ export type ConfigObject = {
bin: {
ffmpeg?: string;
mkvmerge?: string;
ffprobe?: string;
mp4decrypt?: string;
shaka?: string;
};
@ -131,7 +130,7 @@ const loadCfg = (): ConfigObject => {
}
if (!fs.existsSync(defaultCfg.dir.content)) {
try {
fs.ensureDirSync(defaultCfg.dir.content);
fs.mkdirSync(defaultCfg.dir.content, { recursive: true });
} catch (e) {
console.error('Content directory not accessible!');
return defaultCfg;
@ -150,7 +149,6 @@ const loadBinCfg = async () => {
const defaultBin = {
ffmpeg: 'ffmpeg',
mkvmerge: 'mkvmerge',
ffprobe: 'ffprobe',
mp4decrypt: 'mp4decrypt',
shaka: 'shaka-packager'
};
@ -192,7 +190,7 @@ const loadCRSession = () => {
const saveCRSession = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(sessCfgFile.cr);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${sessCfgFile.cr}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save session file to disk!");
@ -210,7 +208,7 @@ const loadCRToken = () => {
const saveCRToken = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(tokenFile.cr);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${tokenFile.cr}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save token file to disk!");
@ -228,7 +226,7 @@ const loadADNToken = () => {
const saveADNToken = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(tokenFile.adn);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${tokenFile.adn}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save token file to disk!");
@ -251,7 +249,7 @@ const loadHDSession = () => {
const saveHDSession = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(sessCfgFile.hd);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${sessCfgFile.hd}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save session file to disk!");
@ -269,7 +267,7 @@ const loadHDToken = () => {
const saveHDToken = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(tokenFile.hd);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${tokenFile.hd}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save token file to disk!");
@ -279,7 +277,7 @@ const saveHDToken = (data: Record<string, unknown>) => {
const saveHDProfile = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(hdPflCfgFile);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${hdPflCfgFile}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save profile file to disk!");
@ -318,7 +316,7 @@ const loadNewHDToken = () => {
const saveNewHDToken = (data: Record<string, unknown>) => {
const cfgFolder = path.dirname(tokenFile.hdNew);
try {
fs.ensureDirSync(cfgFolder);
fs.mkdirSync(cfgFolder, { recursive: true });
fs.writeFileSync(`${tokenFile.hdNew}.yml`, yaml.stringify(data));
} catch (e) {
console.error("Can't save token file to disk!");

View file

@ -1,6 +1,10 @@
import * as yamlCfg from './module.cfg-loader';
import * as yargs from './module.app-args';
import { console } from './log';
import { connect } from 'puppeteer-real-browser';
import { argvC } from './module.app-args';
import { ProxyAgent, fetch, RequestInit } from 'undici';
export type FetchParams = Partial<RequestInit & CustomParams>;
export type Params = {
method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
@ -10,6 +14,10 @@ export type Params = {
followRedirect?: 'follow' | 'error' | 'manual';
};
type CustomParams = {
useProxy: boolean;
};
type GetDataResponse = {
ok: boolean;
res?: Response;
@ -21,49 +29,29 @@ type GetDataResponse = {
};
};
function hasDisplay(): boolean {
if (process.platform === 'linux') {
return !!process.env.DISPLAY || !!process.env.WAYLAND_DISPLAY;
}
// Win and Mac true by default
return true;
}
// function hasDisplay(): boolean {
// if (process.platform === 'linux') {
// return !!process.env.DISPLAY || !!process.env.WAYLAND_DISPLAY;
// }
// // Win and Mac true by default
// return true;
// }
// req
export class Req {
private sessCfg: string;
private service: 'cr' | 'hd' | 'adn';
private session: Record<
string,
{
value: string;
expires: Date;
path: string;
domain: string;
secure: boolean;
'Max-Age'?: string;
}
> = {};
private cfgDir = yamlCfg.cfgDir;
private curl: boolean | string = false;
private debug: boolean;
public argv: typeof argvC;
constructor(
private domain: Record<string, unknown>,
private debug: boolean,
private nosess = false,
private type: 'cr' | 'hd' | 'adn'
) {
this.sessCfg = yamlCfg.sessCfgFile[type];
this.service = type;
constructor() {
const cfg = yamlCfg.loadCfg();
this.argv = yargs.appArgv(cfg.cli, process.env.isGUI ? true : false);
this.debug = this.argv.debug ?? false;
}
async getData(durl: string, params?: RequestInit): Promise<GetDataResponse> {
params = params || {};
// options
async getData(durl: string, params: Partial<RequestInit & CustomParams> = {}): Promise<GetDataResponse> {
const options: RequestInit = {
method: params.method ? params.method : 'GET'
};
// additional params
if (params.headers) {
options.headers = params.headers;
}
@ -73,56 +61,68 @@ export class Req {
if (typeof params.redirect == 'string') {
options.redirect = params.redirect;
}
// debug
// Proxy Handler
let dispatcher: ProxyAgent | undefined;
const validProxy = this.argv.proxy ? this.isValidProxyUrl(this.argv.proxy) : false;
if ((params.useProxy || this.argv.proxyAll) && this.argv.proxy && validProxy) {
dispatcher = new ProxyAgent(this.argv.proxy);
} else if ((params.useProxy || this.argv.proxyAll) && this.argv.proxy && !validProxy) {
console.warn('[Fetch] Provided invalid Proxy URL, not proxying traffic.');
}
// Debug
if (this.debug) {
console.debug('[DEBUG] FETCH OPTIONS:');
console.debug(options);
}
// try do request
try {
const res = await fetch(durl, options);
const res = await fetch(durl, { ...options, dispatcher: dispatcher });
if (!res.ok) {
console.error(`${res.status}: ${res.statusText}`);
const body = await res.text();
const docTitle = body.match(/<title>(.*)<\/title>/);
if (body && docTitle) {
if (docTitle[1] === 'Just a moment...' && durl.includes('crunchyroll') && hasDisplay()) {
console.warn('Cloudflare triggered, trying to get cookies...');
// if (docTitle[1] === 'Just a moment...' && durl.includes('crunchyroll') && hasDisplay()) {
// console.warn('Cloudflare triggered, trying to get cookies...');
const { page } = await connect({
headless: false,
turnstile: true
});
// const { page } = await connect({
// headless: false,
// turnstile: true
// });
await page.goto('https://www.crunchyroll.com/', {
waitUntil: 'networkidle2'
});
// await page.goto('https://www.crunchyroll.com/', {
// waitUntil: 'networkidle2'
// });
await page.waitForRequest('https://www.crunchyroll.com/auth/v1/token');
// await page.waitForRequest('https://www.crunchyroll.com/auth/v1/token');
const cookies = await page.cookies();
// const cookies = await page.cookies();
await page.close();
// await page.close();
params.headers = {
...params.headers,
Cookie: cookies.map((c) => `${c.name}=${c.value}`).join('; '),
'Set-Cookie': cookies.map((c) => `${c.name}=${c.value}`).join('; ')
};
// params.headers = {
// ...params.headers,
// Cookie: cookies.map((c) => `${c.name}=${c.value}`).join('; '),
// 'Set-Cookie': cookies.map((c) => `${c.name}=${c.value}`).join('; ')
// };
(params as any).headers['User-Agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36';
// (params as any).headers['User-Agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36';
return await this.getData(durl, params);
} else {
console.error(docTitle[1]);
}
// return await this.getData(durl, params);
// } else {
// console.error(docTitle[1]);
// }
console.error(docTitle[1]);
} else {
console.error(body);
}
}
return {
ok: res.ok,
res,
res: res as any,
headers: params.headers as Record<string, string>
};
} catch (_error) {
@ -149,33 +149,28 @@ export class Req {
};
}
}
}
export function buildProxy(proxyBaseUrl: string, proxyAuth: string) {
if (!proxyBaseUrl.match(/^(https?|socks4|socks5):/)) {
proxyBaseUrl = 'http://' + proxyBaseUrl;
}
const proxyCfg = new URL(proxyBaseUrl);
let proxyStr = `${proxyCfg.protocol}//`;
if (typeof proxyCfg.hostname != 'string' || proxyCfg.hostname == '') {
throw new Error('[ERROR] Hostname and port required for proxy!');
}
if (proxyAuth && typeof proxyAuth == 'string' && proxyAuth.match(':')) {
proxyCfg.username = proxyAuth.split(':')[0];
proxyCfg.password = proxyAuth.split(':')[1];
proxyStr += `${proxyCfg.username}:${proxyCfg.password}@`;
}
proxyStr += proxyCfg.hostname;
if (!proxyCfg.port && proxyCfg.protocol == 'http:') {
proxyStr += ':80';
} else if (!proxyCfg.port && proxyCfg.protocol == 'https:') {
proxyStr += ':443';
}
return proxyStr;
private isValidProxyUrl(proxyUrl: string): boolean {
try {
if (!proxyUrl.match(/^(https?|socks4|socks5):\/\//)) {
return false;
}
const url = new URL(proxyUrl);
if (!url.hostname) return false;
if (!['http:', 'https:'].includes(url.protocol)) {
return false;
}
if (url.port && (!/^\d+$/.test(url.port) || Number(url.port) < 1 || Number(url.port) > 65535)) {
return false;
}
return true;
} catch {
return false;
}
}
}

View file

@ -25,18 +25,26 @@ export default class Helper {
static cleanupFilename(n: string) {
/* eslint-disable no-useless-escape, no-control-regex */
const fixingChar = '_';
const illegalRe = /[\/\?<>\\:\*\|":]/g;
// Smart Replacer
const rep: Record<string, string> = {
'/': '',
'\\': '',
':': '',
'*': '',
'?': '',
'"': "'",
'<': '',
'>': ''
};
n = n.replace(/[\/\\:\*\?"<>\|]/g, (ch) => rep[ch] || '_');
// Old Replacer
const controlRe = /[\x00-\x1f\x80-\x9f]/g;
const reservedRe = /^\.+$/;
const windowsReservedRe = /^(con|prn|aux|nul|com[0-9]|lpt[0-9])(\..*)?$/i;
const windowsTrailingRe = /[\. ]+$/;
return n
.replace(illegalRe, fixingChar)
.replace(controlRe, fixingChar)
.replace(reservedRe, fixingChar)
.replace(windowsReservedRe, fixingChar)
.replace(windowsTrailingRe, fixingChar);
return n.replace(controlRe, '_').replace(reservedRe, '_').replace(windowsReservedRe, '_').replace(windowsTrailingRe, '_');
}
static exec(

View file

@ -1,14 +1,14 @@
import * as iso639 from 'iso-639';
import * as yamlCfg from './module.cfg-loader';
import { fontFamilies, fontMime } from './module.fontsData';
import path from 'path';
import fs from 'fs';
import fsp from 'fs/promises';
import { LanguageItem } from './module.langsData';
import { AvailableMuxer } from './module.args';
import { console } from './log';
import ffprobe from 'ffprobe';
import Helper from './module.helper';
import { convertChaptersToFFmpegFormat } from './module.ffmpegChapter';
import { mediaInfoFactory } from 'mediainfo.js';
export type MergerInput = {
path: string;
@ -67,13 +67,26 @@ class Merger {
public async createDelays() {
//Don't bother scanning it if there is only 1 vna stream
if (this.options.videoAndAudio.length > 1) {
const bin = await yamlCfg.loadBinCfg();
const vnas = this.options.videoAndAudio;
//get and set durations on each videoAndAudio Stream
for (const [vnaIndex, vna] of vnas.entries()) {
const streamInfo = await ffprobe(vna.path, { path: bin.ffprobe as string });
const videoInfo = streamInfo.streams.filter((stream) => stream.codec_type == 'video');
vnas[vnaIndex].duration = parseInt(videoInfo[0].duration as string);
const file = await fsp.open(vna.path);
const { size } = await fsp.stat(vna.path);
// Mediainfo
const mediaInfo = await mediaInfoFactory();
const result = await mediaInfo.analyzeData(
() => size,
async (size, offset) => {
const buf = Buffer.alloc(size);
const { bytesRead } = await file.read(buf, 0, size, offset);
return buf.subarray(0, bytesRead);
}
);
await file.close();
const videoInfo = result?.media?.track?.filter((stream) => stream['@type'] == 'Video');
vnas[vnaIndex].duration = videoInfo?.[0].Duration;
}
//Sort videoAndAudio streams by duration (shortest first)
vnas.sort((a, b) => {

View file

@ -1,6 +1,10 @@
import { parse as mpdParse } from 'mpd-parser';
import { LanguageItem, findLang, languages } from './module.langsData';
import { console } from './log';
import * as reqModule from './module.fetch';
import { FetchParams } from './module.fetch';
const req = new reqModule.Req();
type Segment = {
uri: string;
@ -77,13 +81,15 @@ export async function parse(manifest: string, language?: LanguageItem, url?: str
if (!Object.prototype.hasOwnProperty.call(ret, host)) ret[host] = { audio: [], video: [] };
if (playlist.sidx && playlist.segments.length == 0) {
const options: RequestInit = {
const options: FetchParams = {
method: 'head'
};
const item = await fetch(playlist.sidx.uri, options);
if (!item.ok)
console.warn(`${item.status}: ${item.statusText}, Unable to fetch byteLength for audio stream ${Math.round(playlist.attributes.BANDWIDTH / 1024)}KiB/s`);
const byteLength = parseInt(item.headers.get('content-length') as string);
const itemReq = await req.getData(playlist.sidx.uri, options);
if (!itemReq.res || !itemReq.ok)
console.warn(
`${itemReq.error?.res?.status}: ${itemReq.error?.res?.statusText}, Unable to fetch byteLength for audio stream ${Math.round(playlist.attributes.BANDWIDTH / 1024)}KiB/s`
);
const byteLength = parseInt(itemReq.res?.headers?.get('content-length') as string);
let currentByte = playlist.sidx.map.byterange.length;
while (currentByte <= byteLength) {
playlist.segments.push({
@ -156,15 +162,15 @@ export async function parse(manifest: string, language?: LanguageItem, url?: str
if (!Object.prototype.hasOwnProperty.call(ret, host)) ret[host] = { audio: [], video: [] };
if (playlist.sidx && playlist.segments.length == 0) {
const options: RequestInit = {
const options: FetchParams = {
method: 'head'
};
const item = await fetch(playlist.sidx.uri, options);
if (!item.ok)
const itemReq = await req.getData(playlist.sidx.uri, options);
if (!itemReq.res || !itemReq.ok)
console.warn(
`${item.status}: ${item.statusText}, Unable to fetch byteLength for video stream ${playlist.attributes.RESOLUTION?.height}x${playlist.attributes.RESOLUTION?.width}@${Math.round(playlist.attributes.BANDWIDTH / 1024)}KiB/s`
`${itemReq.error?.res?.status}: ${itemReq.error?.res?.statusText}, Unable to fetch byteLength for video stream ${playlist.attributes.RESOLUTION?.height}x${playlist.attributes.RESOLUTION?.width}@${Math.round(playlist.attributes.BANDWIDTH / 1024)}KiB/s`
);
const byteLength = parseInt(item.headers.get('content-length') as string);
const byteLength = parseInt(itemReq.res?.headers?.get('content-length') as string);
let currentByte = playlist.sidx.map.byterange.length;
while (currentByte <= byteLength) {
playlist.segments.push({
@ -223,7 +229,3 @@ export async function parse(manifest: string, language?: LanguageItem, url?: str
return ret;
}
function arrayBufferToBase64(buffer: Uint8Array): string {
return Buffer.from(buffer).toString('base64');
}

View file

@ -1,16 +1,18 @@
import fs from 'fs';
import { GithubTag, TagCompare } from '../@types/github';
import path from 'path';
import { UpdateFile } from '../@types/updateFile';
import packageJson from '../package.json';
import { CompilerOptions, transpileModule } from 'typescript';
import tsConfig from '../tsconfig.json';
import fsextra from 'fs-extra';
import fs from 'fs';
import { workingDir } from './module.cfg-loader';
import { console } from './log';
import Helper from './module.helper';
import * as reqModule from './module.fetch';
const updateFilePlace = path.join(workingDir, 'config', 'updates.json');
const req = new reqModule.Req();
const updateIgnore = [
'*.d.ts',
'.git',
@ -58,8 +60,13 @@ export default async (force = false) => {
}
}
console.info('Checking for updates...');
const tagRequest = await fetch('https://api.github.com/repos/anidl/multi-downloader-nx/tags');
const tags = JSON.parse(await tagRequest.text()) as GithubTag[];
const tagRequest = await req.getData('https://api.github.com/repos/anidl/multi-downloader-nx/tags');
if (!tagRequest.res || !tagRequest.ok) {
console.info('No new tags found');
return done();
}
const tags = JSON.parse((await tagRequest.res.text()) as string) as GithubTag[];
if (tags.length > 0) {
const newer = tags.filter((a) => {
@ -72,9 +79,13 @@ export default async (force = false) => {
return done();
}
const newest = newer.sort((a, b) => (a.name < b.name ? 1 : a.name > b.name ? -1 : 0))[0];
const compareRequest = await fetch(`https://api.github.com/repos/anidl/multi-downloader-nx/compare/${packageJson.version}...${newest.name}`);
const compareRequest = await req.getData(`https://api.github.com/repos/anidl/multi-downloader-nx/compare/${packageJson.version}...${newest.name}`);
if (!compareRequest.res || !compareRequest.ok) {
console.info('No new tags found');
return done();
}
const compareJSON = JSON.parse(await compareRequest.text()) as TagCompare;
const compareJSON = JSON.parse(await compareRequest.res.text()) as TagCompare;
console.info(`You are behind by ${compareJSON.ahead_by} releases!`);
const changedFiles = compareJSON.files
@ -109,7 +120,7 @@ export default async (force = false) => {
const isTSX = a.filename.endsWith('tsx');
const ret = {
path: a.filename.slice(0, isTSX ? -3 : -2) + `js${isTSX ? 'x' : ''}`,
content: transpileModule(await (await fetch(a.raw_url)).text(), {
content: transpileModule((await (await req.getData(a.raw_url)).res?.text()) ?? '', {
compilerOptions: tsConfig.compilerOptions as unknown as CompilerOptions
}).outputText,
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
@ -119,7 +130,7 @@ export default async (force = false) => {
} else {
const ret = {
path: a.filename,
content: await (await fetch(a.raw_url)).text(),
content: (await (await req.getData(a.raw_url)).res?.text()) ?? '',
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
};
console.info('✓ Got %s', ret.path);
@ -130,7 +141,7 @@ export default async (force = false) => {
changesToApply.forEach((a) => {
try {
fsextra.ensureDirSync(path.dirname(a.path));
fs.mkdirSync(path.dirname(a.path), { recursive: true });
fs.writeFileSync(path.join(__dirname, '..', a.path), a.content);
console.info('✓ Written %s', a.path);
} catch (er) {

View file

@ -1,450 +0,0 @@
import * as fs from 'fs';
import { createHash } from 'crypto';
import { Parser } from 'binary-parser-encoder';
import ECCKey from './ecc_key';
import { console } from '../log';
/**
 * Rounds `length` up to the nearest multiple of `alignment`.
 * Used for the 4-byte padding of variable-length BCert fields.
 */
function alignUp(length: number, alignment: number): number {
  const blockCount = Math.ceil(length / alignment);
  return blockCount * alignment;
}
/**
 * binary-parser struct definitions for PlayReady binary certificates
 * ("CERT") and certificate chains ("CHAI").
 * All integers are big-endian; variable-length strings are stored padded
 * to a 4-byte boundary (see alignUp).
 */
export class BCertStructs {
  // Tag 1: core certificate identity, security level and expiry.
  static DrmBCertBasicInfo = new Parser()
    .buffer('cert_id', { length: 16 })
    .uint32be('security_level')
    .uint32be('flags')
    .uint32be('cert_type')
    .buffer('public_key_digest', { length: 32 })
    .uint32be('expiration_date')
    .buffer('client_id', { length: 16 });
  // Tag 2: domain-bound certificate info.
  static DrmBCertDomainInfo = new Parser()
    .buffer('service_id', { length: 16 })
    .buffer('account_id', { length: 16 })
    .uint32be('revision_timestamp')
    .uint32be('domain_url_length')
    .buffer('domain_url', {
      // URL payload is padded up to a 4-byte boundary.
      length: function () {
        return alignUp((this as any).domain_url_length, 4);
      }
    });
  // Tag 3: PC-specific security version.
  static DrmBCertPCInfo = new Parser().uint32be('security_version');
  // Tag 4: per-device license/header/chain-depth limits.
  static DrmBCertDeviceInfo = new Parser().uint32be('max_license').uint32be('max_header').uint32be('max_chain_depth');
  // Tag 5: list of supported feature ids.
  static DrmBCertFeatureInfo = new Parser().uint32be('feature_count').array('features', {
    type: 'uint32be',
    length: 'feature_count'
  });
  // One key entry inside the key-info attribute; 'length' is in bits.
  static CertKey = new Parser()
    .uint16be('type')
    .uint16be('length')
    .uint32be('flags')
    .buffer('key', {
      // bits -> bytes
      length: function () {
        return (this as any).length / 8;
      }
    })
    .uint32be('usages_count')
    .array('usages', {
      type: 'uint32be',
      length: 'usages_count'
    });
  // Tag 6: the certificate's key list.
  static DrmBCertKeyInfo = new Parser().uint32be('key_count').array('cert_keys', {
    type: BCertStructs.CertKey,
    length: 'key_count'
  });
  // Tag 7: manufacturer / model strings, each padded to 4 bytes.
  static DrmBCertManufacturerInfo = new Parser()
    .uint32be('flags')
    .uint32be('manufacturer_name_length')
    .buffer('manufacturer_name', {
      length: function () {
        return alignUp((this as any).manufacturer_name_length, 4);
      }
    })
    .uint32be('model_name_length')
    .buffer('model_name', {
      length: function () {
        return alignUp((this as any).model_name_length, 4);
      }
    })
    .uint32be('model_number_length')
    .buffer('model_number', {
      length: function () {
        return alignUp((this as any).model_number_length, 4);
      }
    });
  // Tag 8: signature over the certificate body plus the signing key.
  static DrmBCertSignatureInfo = new Parser()
    .uint16be('signature_type')
    .uint16be('signature_size')
    .buffer('signature', { length: 'signature_size' })
    .uint32be('signature_key_size')
    .buffer('signature_key', {
      // key size is expressed in bits.
      length: function () {
        return (this as any).signature_key_size / 8;
      }
    });
  // Tag 9: Silverlight runtime info.
  static DrmBCertSilverlightInfo = new Parser().uint32be('security_version').uint32be('platform_identifier');
  // Tag 10: metering server info, URL padded to 4 bytes.
  static DrmBCertMeteringInfo = new Parser()
    .buffer('metering_id', { length: 16 })
    .uint32be('metering_url_length')
    .buffer('metering_url', {
      length: function () {
        return alignUp((this as any).metering_url_length, 4);
      }
    });
  // Tag 11: extended-data signing key.
  static DrmBCertExtDataSignKeyInfo = new Parser()
    .uint16be('key_type')
    .uint16be('key_length')
    .uint32be('flags')
    .buffer('key', {
      // NOTE(review): reads 'length' but this struct declares 'key_length' —
      // verify which value the parser actually resolves here.
      length: function () {
        return (this as any).length / 8;
      }
    });
  // A single extended-data record (size-prefixed blob).
  static BCertExtDataRecord = new Parser().uint32be('data_size').buffer('data', {
    length: 'data_size'
  });
  // Tag 13: signature over the extended-data container.
  static DrmBCertExtDataSignature = new Parser().uint16be('signature_type').uint16be('signature_size').buffer('signature', {
    length: 'signature_size'
  });
  // Tag 12: extended-data records plus their signature.
  static BCertExtDataContainer = new Parser()
    .uint32be('record_count')
    .array('records', {
      length: 'record_count',
      type: BCertStructs.BCertExtDataRecord
    })
    .nest('signature', {
      type: BCertStructs.DrmBCertExtDataSignature
    });
  // Tag 15: server certificate info.
  static DrmBCertServerInfo = new Parser().uint32be('warning_days');
  // Tags 16/17: security version info.
  static DrmBcertSecurityVersion = new Parser().uint32be('security_version').uint32be('platform_identifier');
  // One TLV attribute: 8-byte header (flags/tag/length), then a payload
  // selected by tag; 'length' includes the 8-byte header.
  static Attribute = new Parser()
    .uint16be('flags')
    .uint16be('tag')
    .uint32be('length')
    .choice('attribute', {
      tag: 'tag',
      choices: {
        1: BCertStructs.DrmBCertBasicInfo,
        2: BCertStructs.DrmBCertDomainInfo,
        3: BCertStructs.DrmBCertPCInfo,
        4: BCertStructs.DrmBCertDeviceInfo,
        5: BCertStructs.DrmBCertFeatureInfo,
        6: BCertStructs.DrmBCertKeyInfo,
        7: BCertStructs.DrmBCertManufacturerInfo,
        8: BCertStructs.DrmBCertSignatureInfo,
        9: BCertStructs.DrmBCertSilverlightInfo,
        10: BCertStructs.DrmBCertMeteringInfo,
        11: BCertStructs.DrmBCertExtDataSignKeyInfo,
        12: BCertStructs.BCertExtDataContainer,
        13: BCertStructs.DrmBCertExtDataSignature,
        // Tag 14: opaque blob; subtract the 8-byte attribute header.
        14: new Parser().buffer('data', {
          length: function () {
            return (this as any).length - 8;
          }
        }),
        15: BCertStructs.DrmBCertServerInfo,
        16: BCertStructs.DrmBcertSecurityVersion,
        17: BCertStructs.DrmBcertSecurityVersion
      },
      // Unknown tags fall back to an opaque blob.
      defaultChoice: new Parser().buffer('data', {
        length: function () {
          return (this as any).length - 8;
        }
      })
    });
  // A single certificate: "CERT" magic, header, then its attributes
  // filling the remainder after the 16-byte header.
  static BCert = new Parser()
    .string('signature', { length: 4, assert: 'CERT' })
    .int32be('version')
    .int32be('total_length')
    .int32be('certificate_length')
    .array('attributes', {
      type: BCertStructs.Attribute,
      lengthInBytes: function () {
        return (this as any).total_length - 16;
      }
    });
  // A certificate chain: "CHAI" magic followed by a counted list of BCerts.
  static BCertChain = new Parser()
    .string('signature', { length: 4, assert: 'CHAI' })
    .int32be('version')
    .int32be('total_length')
    .int32be('flags')
    .int32be('certificate_count')
    .array('certificates', {
      type: BCertStructs.BCert,
      length: 'certificate_count'
    });
}
/**
 * Wraps one parsed PlayReady binary certificate ("CERT" container) and
 * provides accessors for its attributes plus re-encoding.
 */
export class Certificate {
  // Parsed BCert structure (header fields + attribute list).
  parsed: any;
  // Struct used to re-encode this certificate.
  _BCERT: Parser;
  constructor(parsed_bcert: any, bcert_obj: Parser = BCertStructs.BCert) {
    this.parsed = parsed_bcert;
    this._BCERT = bcert_obj;
  }
  // UNSTABLE
  /**
   * Builds and signs a new leaf (device) certificate.
   *
   * @param cert_id unique certificate id (16 bytes)
   * @param security_level SL value to embed in basic info
   * @param client_id client id (16 bytes)
   * @param signing_key device ECC signing key (embedded as cert key, usage 1)
   * @param encryption_key device ECC encryption key (embedded, usage 2)
   * @param group_key group key used to sign the new certificate
   * @param parent chain whose leaf supplies the manufacturer attribute
   * @param expiry expiration timestamp (default: never, 0xffffffff)
   * @returns the freshly built, signed Certificate
   */
  static new_leaf_cert(
    cert_id: Buffer,
    security_level: number,
    client_id: Buffer,
    signing_key: ECCKey,
    encryption_key: ECCKey,
    group_key: ECCKey,
    parent: CertificateChain,
    expiry: number = 0xffffffff,
    max_license: number = 10240,
    max_header: number = 15360,
    max_chain_depth: number = 2
  ): Certificate {
    // Tag 1: identity; digest binds the signing key to this certificate.
    const basic_info = {
      cert_id: cert_id,
      security_level: security_level,
      flags: 0,
      cert_type: 2,
      public_key_digest: signing_key.publicSha256Digest(),
      expiration_date: expiry,
      client_id: client_id
    };
    const basic_info_attribute = {
      flags: 1,
      tag: 1,
      // attribute length = encoded payload size + 8-byte TLV header
      length: BCertStructs.DrmBCertBasicInfo.encode(basic_info).length + 8,
      attribute: basic_info
    };
    // Tag 4: device limits.
    const device_info = {
      max_license: max_license,
      max_header: max_header,
      max_chain_depth: max_chain_depth
    };
    const device_info_attribute = {
      flags: 1,
      tag: 4,
      length: BCertStructs.DrmBCertDeviceInfo.encode(device_info).length + 8,
      attribute: device_info
    };
    // Tag 5: advertised feature ids.
    const feature = {
      feature_count: 3,
      features: [4, 9, 13]
    };
    const feature_attribute = {
      flags: 1,
      tag: 5,
      length: BCertStructs.DrmBCertFeatureInfo.encode(feature).length + 8,
      attribute: feature
    };
    // Key entries; usage 1 = sign, usage 2 = encrypt.
    // NOTE(review): embeds privateBytes() in the key attribute — confirm
    // this shouldn't be the public key instead.
    const cert_key_sign = {
      type: 1,
      length: 512, // bits
      flags: 0,
      key: signing_key.privateBytes(),
      usages_count: 1,
      usages: [1]
    };
    const cert_key_encrypt = {
      type: 1,
      length: 512, // bits
      flags: 0,
      key: encryption_key.privateBytes(),
      usages_count: 1,
      usages: [2]
    };
    const key_info = {
      key_count: 2,
      cert_keys: [cert_key_sign, cert_key_encrypt]
    };
    const key_info_attribute = {
      flags: 1,
      tag: 6,
      length: BCertStructs.DrmBCertKeyInfo.encode(key_info).length + 8,
      attribute: key_info
    };
    // Tag 7 is copied verbatim from the parent chain's leaf certificate.
    const manufacturer_info = parent.get_certificate(0).get_attribute(7);
    const new_bcert_container = {
      signature: 'CERT',
      version: 1,
      total_length: 0,
      certificate_length: 0,
      attributes: [basic_info_attribute, device_info_attribute, feature_attribute, key_info_attribute, manufacturer_info]
    };
    // First encode to learn the unsigned length; total_length reserves
    // 144 bytes for the signature attribute appended below.
    let payload = BCertStructs.BCert.encode(new_bcert_container);
    new_bcert_container.certificate_length = payload.length;
    new_bcert_container.total_length = payload.length + 144;
    // Re-encode with final lengths; this is the byte range that gets signed.
    payload = BCertStructs.BCert.encode(new_bcert_container);
    const hash = createHash('sha256');
    hash.update(payload);
    const digest = hash.digest();
    // ECDSA raw (r || s) signature, 32 bytes each, big-endian.
    const signatureObj = group_key.keyPair.sign(digest);
    const r = Buffer.from(signatureObj.r.toArray('be', 32));
    const s = Buffer.from(signatureObj.s.toArray('be', 32));
    const signature = Buffer.concat([r, s]);
    // Tag 8: signature attribute carrying the group public key.
    const signature_info = {
      signature_type: 1,
      signature_size: 64,
      signature: signature,
      signature_key_size: 512, // bits
      signature_key: group_key.publicBytes()
    };
    const signature_info_attribute = {
      flags: 1,
      tag: 8,
      length: BCertStructs.DrmBCertSignatureInfo.encode(signature_info).length + 8,
      attribute: signature_info
    };
    new_bcert_container.attributes.push(signature_info_attribute);
    return new Certificate(new_bcert_container);
  }
  /**
   * Parses a certificate from raw bytes or a base64 string.
   * @throws Error when the input is neither a Buffer nor a string.
   */
  static loads(data: string | Buffer): Certificate {
    if (typeof data === 'string') {
      data = Buffer.from(data, 'base64');
    }
    if (!Buffer.isBuffer(data)) {
      throw new Error(`Expecting Bytes or Base64 input, got ${data}`);
    }
    const cert = BCertStructs.BCert;
    const parsed_bcert = cert.parse(data);
    return new Certificate(parsed_bcert, cert);
  }
  /** Reads and parses a certificate from a file on disk. */
  static load(filePath: string): Certificate {
    const data = fs.readFileSync(filePath);
    return Certificate.loads(data);
  }
  /** Returns the first attribute with the given tag, or undefined. */
  get_attribute(type_: number) {
    for (const attribute of this.parsed.attributes) {
      if (attribute.tag === type_) {
        return attribute;
      }
    }
  }
  /** Security level from the basic-info attribute (tag 1); 0 if absent. */
  get_security_level(): number {
    const basic_info_attribute = this.get_attribute(1);
    if (basic_info_attribute) {
      return basic_info_attribute.attribute.security_level;
    }
    return 0;
  }
  // Strips trailing NUL padding from a fixed-width name buffer.
  private static _unpad(name: Buffer): string {
    return name.toString('utf8').replace(/\0+$/, '');
  }
  /** "manufacturer model-name model-number" from tag 7; '' if absent. */
  get_name(): string {
    const manufacturer_info_attribute = this.get_attribute(7);
    if (manufacturer_info_attribute) {
      const manufacturer_info = manufacturer_info_attribute.attribute;
      const manufacturer_name = Certificate._unpad(manufacturer_info.manufacturer_name);
      const model_name = Certificate._unpad(manufacturer_info.model_name);
      const model_number = Certificate._unpad(manufacturer_info.model_number);
      return `${manufacturer_name} ${model_name} ${model_number}`;
    }
    return '';
  }
  /** Re-encodes the certificate to its binary form. */
  dumps(): Buffer {
    return this._BCERT.encode(this.parsed);
  }
  /** The struct used for (de)serialization. */
  struct(): Parser {
    return this._BCERT;
  }
}
/**
 * A PlayReady certificate chain ("CHAI" container) wrapping one or more
 * certificates, leaf first. Delegates most queries to the leaf.
 */
export class CertificateChain {
  // Parsed BCertChain structure (header fields + certificate list).
  parsed: any;
  // Struct used to re-encode the chain.
  _BCERT_CHAIN: Parser;
  constructor(parsed_bcert_chain: any, bcert_chain_obj: Parser = BCertStructs.BCertChain) {
    this.parsed = parsed_bcert_chain;
    this._BCERT_CHAIN = bcert_chain_obj;
  }
  /**
   * Parses a chain from raw bytes or a base64 string.
   * @throws Error when the input is neither a Buffer nor a string, or
   *         re-throws (after logging) when binary parsing fails.
   */
  static loads(data: string | Buffer): CertificateChain {
    const raw = typeof data === 'string' ? Buffer.from(data, 'base64') : data;
    if (!Buffer.isBuffer(raw)) {
      throw new Error(`Expecting Bytes or Base64 input, got ${raw}`);
    }
    const chainStruct = BCertStructs.BCertChain;
    try {
      return new CertificateChain(chainStruct.parse(raw), chainStruct);
    } catch (error) {
      console.error('Error during parsing:', error);
      throw error;
    }
  }
  /** Reads and parses a chain from a file on disk. */
  static load(filePath: string): CertificateChain {
    return CertificateChain.loads(fs.readFileSync(filePath));
  }
  /** Re-encodes the chain to its binary form. */
  dumps(): Buffer {
    return this._BCERT_CHAIN.encode(this.parsed);
  }
  /** The struct used for (de)serialization. */
  struct(): Parser {
    return this._BCERT_CHAIN;
  }
  /** Wraps the certificate at `index` (0 = leaf). */
  get_certificate(index: number): Certificate {
    return new Certificate(this.parsed.certificates[index]);
  }
  /** Security level of the leaf certificate. */
  get_security_level(): number {
    return this.get_certificate(0).get_security_level();
  }
  /** Manufacturer/model name of the leaf certificate. */
  get_name(): string {
    return this.get_certificate(0).get_name();
  }
  /** Appends `bcert` to the end of the chain, updating count and length. */
  append(bcert: Certificate): void {
    this.parsed.certificates.push(bcert.parsed);
    this.parsed.certificate_count += 1;
    this.parsed.total_length += bcert.dumps().length;
  }
  /** Inserts `bcert` at the front of the chain, updating count and length. */
  prepend(bcert: Certificate): void {
    this.parsed.certificates.unshift(bcert.parsed);
    this.parsed.certificate_count += 1;
    this.parsed.total_length += bcert.dumps().length;
  }
}

View file

@ -1,228 +0,0 @@
import { CertificateChain } from './bcert';
import ECCKey from './ecc_key';
import ElGamal, { Point } from './elgamal';
import XmlKey from './xml_key';
import { Key } from './key';
import { XmrUtil } from './xmrlicense';
import crypto from 'crypto';
import { randomBytes } from 'crypto';
import { createHash } from 'crypto';
import elliptic from 'elliptic';
import { Device } from './device';
import { XMLParser } from 'fast-xml-parser';
/**
 * Minimal PlayReady CDM: builds signed SOAP AcquireLicense challenges for
 * a device and decrypts content keys out of license responses.
 */
export default class Cdm {
  security_level: number;
  certificate_chain: CertificateChain;
  encryption_key: ECCKey;
  signing_key: ECCKey;
  client_version: string;
  // NOTE(review): la_version is stored but not read in this class — confirm intent.
  la_version: number;
  curve: elliptic.ec;
  elgamal: ElGamal;
  // WMRM license-server public key (P-256) the XML key is encrypted to.
  private wmrm_key: elliptic.ec.KeyPair;
  // Per-session key material carried to the server via ElGamal.
  private xml_key: XmlKey;
  constructor(
    security_level: number,
    certificate_chain: CertificateChain,
    encryption_key: ECCKey,
    signing_key: ECCKey,
    client_version: string = '2.4.117.27',
    la_version: number = 1
  ) {
    this.security_level = security_level;
    this.certificate_chain = certificate_chain;
    this.encryption_key = encryption_key;
    this.signing_key = signing_key;
    this.client_version = client_version;
    this.la_version = la_version;
    this.curve = new elliptic.ec('p256');
    this.elgamal = new ElGamal(this.curve);
    // Hard-coded WMRM server public key coordinates (hex).
    const x = 'c8b6af16ee941aadaa5389b4af2c10e356be42af175ef3face93254e7b0b3d9b';
    const y = '982b27b5cb2341326e56aa857dbfd5c634ce2cf9ea74fca8f2af5957efeea562';
    this.wmrm_key = this.curve.keyFromPublic({ x, y }, 'hex');
    this.xml_key = new XmlKey();
  }
  /** Builds a Cdm from a parsed .prd Device (keys + group certificate). */
  static fromDevice(device: Device): Cdm {
    return new Cdm(device.security_level, device.group_certificate, device.encryption_key, device.signing_key);
  }
  // ElGamal-encrypts the XML key point to the WMRM public key and returns
  // the two resulting curve points as four concatenated coordinates.
  private getKeyData(): Buffer {
    const messagePoint = this.xml_key.getPoint(this.elgamal.curve);
    const [point1, point2] = this.elgamal.encrypt(messagePoint, this.wmrm_key.getPublic() as Point);
    const bufferArray = Buffer.concat([ElGamal.toBytes(point1.getX()), ElGamal.toBytes(point1.getY()), ElGamal.toBytes(point2.getX()), ElGamal.toBytes(point2.getY())]);
    return bufferArray;
  }
  // AES-128-CBC-encrypts the certificate-chain XML body with the XML key;
  // returns IV || ciphertext.
  private getCipherData(): Buffer {
    const b64_chain = this.certificate_chain.dumps().toString('base64');
    const body = `<Data><CertificateChains><CertificateChain>${b64_chain}</CertificateChain></CertificateChains><Features><Feature Name="AESCBC"></Feature></Features></Data>`;
    const cipher = crypto.createCipheriv('aes-128-cbc', this.xml_key.aesKey, this.xml_key.aesIv);
    const ciphertext = Buffer.concat([cipher.update(Buffer.from(body, 'utf-8')), cipher.final()]);
    return Buffer.concat([this.xml_key.aesIv, ciphertext]);
  }
  // Builds the <LA> element (the SignedData payload) that gets hashed and
  // signed for the challenge.
  private buildDigestContent(content_header: string, nonce: string, wmrm_cipher: string, cert_cipher: string): string {
    // Client time in whole seconds since epoch.
    const clientTime = Math.floor(Date.now() / 1000);
    return (
      '<LA xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols" Id="SignedData" xml:space="preserve">' +
      '<Version>4</Version>' +
      `<ContentHeader>${content_header}</ContentHeader>` +
      '<CLIENTINFO>' +
      `<CLIENTVERSION>${this.client_version}</CLIENTVERSION>` +
      '</CLIENTINFO>' +
      `<LicenseNonce>${nonce}</LicenseNonce>` +
      `<ClientTime>${clientTime}</ClientTime>` +
      '<EncryptedData xmlns="http://www.w3.org/2001/04/xmlenc#" Type="http://www.w3.org/2001/04/xmlenc#Element">' +
      '<EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"></EncryptionMethod>' +
      '<KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">' +
      '<EncryptedKey xmlns="http://www.w3.org/2001/04/xmlenc#">' +
      '<EncryptionMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#ecc256"></EncryptionMethod>' +
      '<KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">' +
      '<KeyName>WMRMServer</KeyName>' +
      '</KeyInfo>' +
      '<CipherData>' +
      `<CipherValue>${wmrm_cipher}</CipherValue>` +
      '</CipherData>' +
      '</EncryptedKey>' +
      '</KeyInfo>' +
      '<CipherData>' +
      `<CipherValue>${cert_cipher}</CipherValue>` +
      '</CipherData>' +
      '</EncryptedData>' +
      '</LA>'
    );
  }
  // Builds the <SignedInfo> element referencing the LA digest value.
  private static buildSignedInfo(digest_value: string): string {
    return (
      '<SignedInfo xmlns="http://www.w3.org/2000/09/xmldsig#">' +
      '<CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"></CanonicalizationMethod>' +
      '<SignatureMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#ecdsa-sha256"></SignatureMethod>' +
      '<Reference URI="#SignedData">' +
      '<DigestMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#sha256"></DigestMethod>' +
      `<DigestValue>${digest_value}</DigestValue>` +
      '</Reference>' +
      '</SignedInfo>'
    );
  }
  /**
   * Builds the complete SOAP AcquireLicense challenge for the given WRM
   * content header, signed with the device signing key.
   * @returns the SOAP envelope as an XML string
   */
  getLicenseChallenge(content_header: string): string {
    const nonce = randomBytes(16).toString('base64');
    const wmrm_cipher = this.getKeyData().toString('base64');
    const cert_cipher = this.getCipherData().toString('base64');
    const la_content = this.buildDigestContent(content_header, nonce, wmrm_cipher, cert_cipher);
    // Digest of LA goes into SignedInfo; SignedInfo itself is then signed.
    const la_hash = createHash('sha256').update(la_content, 'utf-8').digest();
    const signed_info = Cdm.buildSignedInfo(la_hash.toString('base64'));
    const signed_info_digest = createHash('sha256').update(signed_info, 'utf-8').digest();
    // ECDSA raw (r || s) signature, 32 bytes each, big-endian.
    const signatureObj = this.signing_key.keyPair.sign(signed_info_digest);
    const r = signatureObj.r.toArrayLike(Buffer, 'be', 32);
    const s = signatureObj.s.toArrayLike(Buffer, 'be', 32);
    const rawSignature = Buffer.concat([r, s]);
    const signatureValue = rawSignature.toString('base64');
    // Uncompressed public key point of the signing key, base64-encoded.
    const publicKeyBytes = this.signing_key.keyPair.getPublic().encode('array', false);
    const publicKeyBuffer = Buffer.from(publicKeyBytes);
    const publicKeyBase64 = publicKeyBuffer.toString('base64');
    const main_body =
      '<?xml version="1.0" encoding="utf-8"?>' +
      '<soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
      'xmlns:xsd="http://www.w3.org/2001/XMLSchema" ' +
      'xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">' +
      '<soap:Body>' +
      '<AcquireLicense xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols">' +
      '<challenge>' +
      '<Challenge xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols/messages">' +
      la_content +
      '<Signature xmlns="http://www.w3.org/2000/09/xmldsig#">' +
      signed_info +
      `<SignatureValue>${signatureValue}</SignatureValue>` +
      '<KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">' +
      '<KeyValue>' +
      '<ECCKeyValue>' +
      `<PublicKey>${publicKeyBase64}</PublicKey>` +
      '</ECCKeyValue>' +
      '</KeyValue>' +
      '</KeyInfo>' +
      '</Signature>' +
      '</Challenge>' +
      '</challenge>' +
      '</AcquireLicense>' +
      '</soap:Body>' +
      '</soap:Envelope>';
    return main_body;
  }
  // ElGamal-decrypts a 128-byte encrypted key blob (two 64-byte points);
  // the content key is bytes 16..32 of the decrypted point's X coordinate.
  private decryptEcc256Key(encrypted_key: Buffer): Buffer {
    const point1 = this.curve.curve.point(encrypted_key.subarray(0, 32).toString('hex'), encrypted_key.subarray(32, 64).toString('hex'));
    const point2 = this.curve.curve.point(encrypted_key.subarray(64, 96).toString('hex'), encrypted_key.subarray(96, 128).toString('hex'));
    const decrypted = ElGamal.decrypt([point1, point2], this.encryption_key.keyPair.getPrivate());
    const decryptedBytes = decrypted.getX().toArray('be', 32).slice(16, 32);
    return Buffer.from(decryptedBytes);
  }
  /**
   * Parses a license-response XML and returns the decrypted content keys.
   * @param license the SOAP license response (string or Buffer)
   * @returns one entry per license with key id, type, cipher info and key
   * @throws Error when parsing fails or a license has no content key
   */
  parseLicense(license: string | Buffer): {
    key_id: string;
    key_type: number;
    cipher_type: number;
    key_length: number;
    key: string;
  }[] {
    try {
      const parser = new XMLParser({
        removeNSPrefix: true
      });
      const result = parser.parse(license);
      let licenses = result['Envelope']['Body']['AcquireLicenseResponse']['AcquireLicenseResult']['Response']['LicenseResponse']['Licenses']['License'];
      // A single license is returned as an object, not an array — normalize.
      if (!Array.isArray(licenses)) {
        licenses = [licenses];
      }
      const keys = [];
      for (const licenseElement of licenses) {
        // Each license element is a base64 XMR blob.
        const keyMaterial = XmrUtil.parse(Buffer.from(licenseElement, 'base64')).license.license.keyMaterial;
        if (!keyMaterial || !keyMaterial.contentKey) throw new Error('No Content Keys retrieved');
        keys.push(
          new Key(
            keyMaterial.contentKey.kid,
            keyMaterial.contentKey.keyType,
            keyMaterial.contentKey.ciphertype,
            keyMaterial.contentKey.length,
            this.decryptEcc256Key(keyMaterial.contentKey.value)
          )
        );
      }
      return keys;
    } catch (error) {
      throw new Error(`Unable to parse license, ${error}`);
    }
  }
}

View file

@ -1,124 +0,0 @@
import { Parser } from 'binary-parser-encoder';
import { CertificateChain } from './bcert';
import ECCKey from './ecc_key';
import * as fs from 'fs';
// Shape of a parsed .prd device blob as consumed by Device's constructor.
// Matches the fields shared by the v2 and v3 binary layouts in DeviceStructs.
type RawDeviceV2 = {
// 3-byte magic, always "PRD".
signature: string;
version: number;
group_certificate_length: number;
group_certificate: Buffer;
// 96-byte raw ECC key blobs (32-byte private scalar + 64-byte public point).
encryption_key: Buffer;
signing_key: Buffer;
};
// On-disk layouts of the .prd (PlayReady Device) file, one parser per format version.
// Field order in each parser defines the byte layout and must not be changed.
class DeviceStructs {
// Every .prd file starts with this 3-byte magic.
static magic = 'PRD';
// v1: length-prefixed group key followed by a length-prefixed group certificate.
static v1 = new Parser()
.string('signature', { length: 3, assert: DeviceStructs.magic })
.uint8('version')
.uint32('group_key_length')
.buffer('group_key', { length: 'group_key_length' })
.uint32('group_certificate_length')
.buffer('group_certificate', { length: 'group_certificate_length' });
// v2: length-prefixed certificate, then fixed 96-byte encryption and signing keys.
static v2 = new Parser()
.string('signature', { length: 3, assert: DeviceStructs.magic })
.uint8('version')
.uint32('group_certificate_length')
.buffer('group_certificate', { length: 'group_certificate_length' })
.buffer('encryption_key', { length: 96 })
.buffer('signing_key', { length: 96 });
// v3: fixed 96-byte group/encryption/signing keys first, then the certificate.
static v3 = new Parser()
.string('signature', { length: 3, assert: DeviceStructs.magic })
.uint8('version')
.buffer('group_key', { length: 96 })
.buffer('encryption_key', { length: 96 })
.buffer('signing_key', { length: 96 })
.uint32('group_certificate_length')
.buffer('group_certificate', { length: 'group_certificate_length' });
}
/**
 * A provisioned PlayReady device: the group certificate chain plus its
 * encryption and signing ECC keys, loaded from a .prd blob.
 */
export class Device {
  // Newest on-disk layout, used when parsing (see DeviceStructs.v3).
  static CURRENT_STRUCT = DeviceStructs.v3;
  group_certificate: CertificateChain;
  encryption_key: ECCKey;
  signing_key: ECCKey;
  security_level: number;
  constructor(parsedData: RawDeviceV2) {
    this.group_certificate = CertificateChain.loads(parsedData.group_certificate);
    this.encryption_key = ECCKey.loads(parsedData.encryption_key);
    this.signing_key = ECCKey.loads(parsedData.signing_key);
    // The device's security level is derived from its certificate chain.
    this.security_level = this.group_certificate.get_security_level();
  }
  /** Parse a device from an in-memory .prd blob (v3 layout). */
  static loads(data: Buffer): Device {
    const parsedData = Device.CURRENT_STRUCT.parse(data);
    return new Device(parsedData);
  }
  /** Read and parse a .prd file from disk. */
  static load(filePath: string): Device {
    const data = fs.readFileSync(filePath);
    return Device.loads(data);
  }
  /**
   * Serialize this device back to a .prd blob.
   *
   * The record built here is v2-shaped (version tag 2, no group_key — this
   * class never retains one), so it must be encoded with the v2 struct.
   * BUG FIX: the previous code encoded it with CURRENT_STRUCT (v3), whose
   * field set/order does not match and which expects a 96-byte group_key.
   * NOTE(review): loads() parses the v3 layout, so a file written by dumps()
   * cannot be round-tripped through loads() — confirm intended behavior.
   */
  dumps(): Buffer {
    const groupCertBytes = this.group_certificate.dumps();
    const encryptionKeyBytes = this.encryption_key.dumps();
    const signingKeyBytes = this.signing_key.dumps();
    const buildData = {
      signature: DeviceStructs.magic,
      version: 2,
      group_certificate_length: groupCertBytes.length,
      group_certificate: groupCertBytes,
      encryption_key: encryptionKeyBytes,
      signing_key: signingKeyBytes
    };
    return DeviceStructs.v2.encode(buildData);
  }
  /** Write the serialized device to disk. */
  dump(filePath: string): void {
    const data = this.dumps();
    fs.writeFileSync(filePath, data);
  }
  /** Filesystem-safe identifier derived from the certificate name and security level. */
  get_name(): string {
    const name = `${this.group_certificate.get_name()}_sl${this.security_level}`;
    return name.replace(/[^a-zA-Z0-9]/g, '_').toLowerCase();
  }
}
// Device V2 disabled because unstable provisioning
// export class Device {
// group_certificate: CertificateChain
// encryption_key: ECCKey
// signing_key: ECCKey
// security_level: number
// constructor(group_certificate: Buffer, group_key: Buffer) {
// this.group_certificate = CertificateChain.loads(group_certificate)
// this.encryption_key = ECCKey.generate()
// this.signing_key = ECCKey.generate()
// this.security_level = this.group_certificate.get_security_level()
// const new_certificate = Certificate.new_key_cert(
// randomBytes(16),
// this.group_certificate.get_security_level(),
// randomBytes(16),
// this.signing_key,
// this.encryption_key,
// ECCKey.loads(group_key),
// this.group_certificate
// )
// this.group_certificate.prepend(new_certificate)
// }
// }

View file

@ -1,91 +0,0 @@
import elliptic from 'elliptic';
import { createHash } from 'crypto';
import * as fs from 'fs';
// Wrapper around an elliptic P-256 key pair with (de)serialization helpers.
// Serialized form: 32-byte big-endian private scalar followed by the 64-byte
// public point (x||y), 96 bytes total.
export default class ECCKey {
keyPair: elliptic.ec.KeyPair;
constructor(keyPair: elliptic.ec.KeyPair) {
this.keyPair = keyPair;
}
// Create a fresh random P-256 key pair.
static generate(): ECCKey {
const EC = new elliptic.ec('p256');
const keyPair = EC.genKeyPair();
return new ECCKey(keyPair);
}
// Build a key pair from a private scalar given as bytes, a hex string, or a number.
static construct(privateKey: Buffer | string | number): ECCKey {
if (Buffer.isBuffer(privateKey)) {
privateKey = privateKey.toString('hex');
} else if (typeof privateKey === 'number') {
privateKey = privateKey.toString(16);
}
const EC = new elliptic.ec('p256');
const keyPair = EC.keyFromPrivate(privateKey, 'hex');
return new ECCKey(keyPair);
}
// Load from raw bytes or base64. Accepts a full 96-byte dump or a bare
// 32-byte private scalar; the public point is re-derived from the scalar.
static loads(data: string | Buffer): ECCKey {
if (typeof data === 'string') {
data = Buffer.from(data, 'base64');
}
if (!Buffer.isBuffer(data)) {
throw new Error(`Expecting Bytes or Base64 input, got ${data}`);
}
if (data.length !== 96 && data.length !== 32) {
throw new Error(`Invalid data length. Expecting 96 or 32 bytes, got ${data.length}`);
}
const privateKey = data.subarray(0, 32);
return ECCKey.construct(privateKey);
}
// Read a key dump from disk.
static load(filePath: string): ECCKey {
const data = fs.readFileSync(filePath);
return ECCKey.loads(data);
}
// Serialize as private scalar || public point (96 bytes).
dumps(): Buffer {
return Buffer.concat([this.privateBytes(), this.publicBytes()]);
}
// Write the serialized key to disk.
dump(filePath: string): void {
fs.writeFileSync(filePath, this.dumps());
}
// Public point coordinates as hex strings.
getPoint(): { x: string; y: string } {
const publicKey = this.keyPair.getPublic();
return {
x: publicKey.getX().toString('hex'),
y: publicKey.getY().toString('hex')
};
}
// 32-byte big-endian private scalar.
privateBytes(): Buffer {
const privateKey = this.keyPair.getPrivate();
return Buffer.from(privateKey.toArray('be', 32));
}
// SHA-256 over the private scalar bytes.
privateSha256Digest(): Buffer {
const hash = createHash('sha256');
hash.update(this.privateBytes());
return hash.digest();
}
// 64-byte big-endian public point: x (32 bytes) || y (32 bytes).
publicBytes(): Buffer {
const publicKey = this.keyPair.getPublic();
const x = publicKey.getX().toArray('be', 32);
const y = publicKey.getY().toArray('be', 32);
return Buffer.concat([Buffer.from(x), Buffer.from(y)]);
}
// SHA-256 over the public point bytes.
publicSha256Digest(): Buffer {
const hash = createHash('sha256');
hash.update(this.publicBytes());
return hash.digest();
}
}

View file

@ -1,43 +0,0 @@
import { ec as EC } from 'elliptic';
import { randomBytes } from 'crypto';
import BN from 'bn.js';
// Minimal elliptic-curve point operations required by ElGamal
// (structurally compatible with elliptic's point objects).
export interface Point {
getY(): BN;
getX(): BN;
// Point addition.
add(point: Point): Point;
// Scalar multiplication.
mul(n: BN | bigint | number): Point;
// Additive inverse.
neg(): Point;
}
// Elliptic-curve ElGamal: encrypt over the curve supplied at construction,
// decrypt statically over whatever curve the ciphertext points live on.
export default class ElGamal {
curve: EC;
constructor(curve: EC) {
this.curve = curve;
}
// Big-endian byte encoding of a BN.
// NOTE(review): the result is minimal-length, not fixed-width — callers that
// slice fixed offsets out of it (e.g. a 32-byte P-256 coordinate) will
// misalign whenever the value has leading zero bytes; confirm whether
// padding to the field size is required.
static toBytes(n: BN): Uint8Array {
const byteArray = n.toString(16).padStart(2, '0');
// Hex strings need an even digit count to decode into whole bytes.
if (byteArray.length % 2 !== 0) {
return Uint8Array.from(Buffer.from('0' + byteArray, 'hex'));
}
return Uint8Array.from(Buffer.from(byteArray, 'hex'));
}
// Classic EC-ElGamal: returns (k*G, M + k*P) for a fresh random scalar k,
// message point M and recipient public key P.
encrypt(messagePoint: Point, publicKey: Point): [Point, Point] {
const ephemeralKey = new BN(randomBytes(32).toString('hex'), 16).mod(this.curve.n!);
const ephemeralKeyBigInt = BigInt(ephemeralKey.toString(10));
const point1 = this.curve.g.mul(ephemeralKeyBigInt);
const point2 = messagePoint.add(publicKey.mul(ephemeralKeyBigInt));
return [point1, point2];
}
// Recover M = C2 - priv * C1.
static decrypt(encrypted: [Point, Point], privateKey: BN): Point {
const [point1, point2] = encrypted;
const sharedSecret = point1.mul(privateKey);
const decryptedMessage = point2.add(sharedSecret.neg());
return decryptedMessage;
}
}

View file

@ -1,63 +0,0 @@
enum KeyType {
Invalid = 0x0000,
AES128CTR = 0x0001,
RC4 = 0x0002,
AES128ECB = 0x0003,
Cocktail = 0x0004,
AESCBC = 0x0005,
UNKNOWN = 0xffff
}
function getKeyType(value: number): KeyType {
switch (value) {
case KeyType.Invalid:
case KeyType.AES128CTR:
case KeyType.RC4:
case KeyType.AES128ECB:
case KeyType.Cocktail:
case KeyType.AESCBC:
return value;
default:
return KeyType.UNKNOWN;
}
}
/** Cipher identifiers describing how an XMR content key is wrapped. */
enum CipherType {
  Invalid = 0x0000,
  RSA128 = 0x0001,
  ChainedLicense = 0x0002,
  ECC256 = 0x0003,
  ECCforScalableLicenses = 0x0004,
  Scalable = 0x0005,
  UNKNOWN = 0xffff
}
/**
 * Narrow a raw numeric tag to a known CipherType.
 * Any unrecognized value collapses to CipherType.UNKNOWN.
 */
function getCipherType(value: number): CipherType {
  const recognized: number[] = [CipherType.Invalid, CipherType.RSA128, CipherType.ChainedLicense, CipherType.ECC256, CipherType.ECCforScalableLicenses, CipherType.Scalable];
  return recognized.includes(value) ? (value as CipherType) : CipherType.UNKNOWN;
}
/**
 * A content key extracted from an XMR license, with its type tags
 * normalized to the known enum values and the key bytes hex-encoded.
 */
export class Key {
  key_id: string;
  key_type: KeyType;
  cipher_type: CipherType;
  key_length: number;
  // Hex-encoded key bytes.
  key: string;
  constructor(key_id: string, key_type: number, cipher_type: number, key_length: number, key: Buffer) {
    this.key = key.toString('hex');
    this.key_length = key_length;
    this.cipher_type = getCipherType(cipher_type);
    this.key_type = getKeyType(key_type);
    this.key_id = key_id;
  }
}

View file

@ -1,122 +0,0 @@
import { Parser } from 'binary-parser';
import { Buffer } from 'buffer';
import WRMHeader from './wrmheader';
// PlayReady SystemID as it appears at bytes 12-28 of a PSSH box.
const SYSTEM_ID = Buffer.from('9a04f07998404286ab92e65be0885f95', 'hex');
// ISO BMFF 'pssh' box: size/type/fullbox header, 16-byte SystemID,
// then a length-prefixed data payload.
const PSSHBox = new Parser()
.uint32('length')
.string('pssh', { length: 4, assert: 'pssh' })
.uint32('fullbox')
.buffer('system_id', { length: 16 })
.uint32('data_length')
.buffer('data', {
length: 'data_length'
});
// One PlayReady record: type 1 carries the UTF-16LE WRM header XML as a
// string; every other type is kept as raw bytes.
const PlayreadyObject = new Parser()
.useContextVars()
.uint16('type')
.uint16('length')
.choice('data', {
tag: 'type',
choices: {
1: new Parser().string('data', {
length: function () {
return (this as any).$parent.length;
},
encoding: 'utf16le'
})
},
defaultChoice: new Parser().buffer('data', {
length: function () {
return (this as any).$parent.length;
}
})
});
// PlayReady Header object: total length, record count, then the records.
const PlayreadyHeader = new Parser().uint32('length').uint16('record_count').array('records', {
length: 'record_count',
type: PlayreadyObject
});
function isPlayreadyPsshBox(data: Buffer): boolean {
if (data.length < 28) return false;
return data.subarray(12, 28).equals(SYSTEM_ID);
}
/**
 * Heuristic UTF-16LE check: every odd offset (the high byte of each
 * little-endian code unit) must be zero. An empty buffer passes.
 */
function isUtf16(data: Buffer): boolean {
  return Array.from(data).every((byte, index) => index % 2 === 0 || byte === 0);
}
/** Yield the payload of every type-1 record whose data decoded to a string (the WRM header XML). */
function* getWrmHeaders(wrm_header: any): IterableIterator<string> {
  const records: Array<{ type: number; data: unknown }> = wrm_header.records;
  for (const record of records) {
    if (record.type !== 1) continue;
    if (typeof record.data !== 'string') continue;
    yield record.data;
  }
}
// Parses a PlayReady PSSH box — or a bare PlayReady Header / raw WRM header —
// and exposes the contained WRM header XML strings.
export class PSSH {
public wrm_headers: string[];
constructor(data: string | Buffer) {
if (!data) {
throw new Error('Data must not be empty');
}
// Accept base64 text as well as raw bytes.
if (typeof data === 'string') {
try {
data = Buffer.from(data, 'base64');
} catch (e) {
throw new Error(`Could not decode data as Base64: ${e}`);
}
}
try {
if (isPlayreadyPsshBox(data)) {
const pssh_box = PSSHBox.parse(data);
const psshData = pssh_box.data;
// The payload may be the UTF-16LE WRM header directly, the same header
// behind a 6- or 10-byte prefix, or a full PlayReady Header object.
if (isUtf16(psshData)) {
this.wrm_headers = [psshData.toString('utf16le')];
} else if (isUtf16(psshData.subarray(6))) {
this.wrm_headers = [psshData.subarray(6).toString('utf16le')];
} else if (isUtf16(psshData.subarray(10))) {
this.wrm_headers = [psshData.subarray(10).toString('utf16le')];
} else {
const playready_header = PlayreadyHeader.parse(psshData);
this.wrm_headers = Array.from(getWrmHeaders(playready_header));
}
} else {
// Same fallback chain for data that is not wrapped in a pssh box.
if (isUtf16(data)) {
this.wrm_headers = [data.toString('utf16le')];
} else if (isUtf16(data.subarray(6))) {
this.wrm_headers = [data.subarray(6).toString('utf16le')];
} else if (isUtf16(data.subarray(10))) {
this.wrm_headers = [data.subarray(10).toString('utf16le')];
} else {
const playready_header = PlayreadyHeader.parse(data);
this.wrm_headers = Array.from(getWrmHeaders(playready_header));
}
}
} catch (e) {
// NOTE(review): the underlying error `e` is discarded here — consider
// including it in the message for easier debugging.
throw new Error('Could not parse data as a PSSH Box nor a PlayReadyHeader');
}
}
// Header downgrade
// Return the WRM headers, optionally rewritten to the v4.0.0.0 format.
public get_wrm_headers(downgrade_to_v4: boolean = false): string[] {
return this.wrm_headers.map(downgrade_to_v4 ? this.downgradePSSH : (_) => _);
}
// Rewrite a single header to v4.0.0.0 via WRMHeader.
private downgradePSSH(wrm_header: string): string {
const header = new WRMHeader(wrm_header);
return header.to_v4_0_0_0();
}
}

View file

@ -1,88 +0,0 @@
import { XMLParser } from 'fast-xml-parser';
/** A key ID entry from a WRM header: algorithm id, KID value, optional checksum. */
export class SignedKeyID {
  public alg_id: string;
  public value: string;
  public checksum?: string;

  constructor(alg_id: string, value: string, checksum?: string) {
    this.alg_id = alg_id;
    this.value = value;
    this.checksum = checksum;
  }
}
// WRM header versions this parser understands; anything else maps to 'UNKNOWN'.
export type Version = '4.0.0.0' | '4.1.0.0' | '4.2.0.0' | '4.3.0.0' | 'UNKNOWN';
// Tuple extracted from WRMHEADER/DATA: [key IDs, LA_URL, LUI_URL, DS_ID].
export type ReturnStructure = [SignedKeyID[], string | null, string | null, string | null];
// Partial shape of the object fast-xml-parser produces for a WRMHEADER document
// (attributes are prefixed with '@_', see the parser options in the constructor).
interface ParsedWRMHeader {
WRMHEADER: {
'@_version': string;
DATA?: any;
};
}
// Parses a PlayReady WRMHEADER XML document (versions 4.0.0.0 through 4.3.0.0),
// extracts key IDs and license-server URLs, and can rewrite headers to the
// v4.0.0.0 format.
export default class WRMHeader {
private header: ParsedWRMHeader['WRMHEADER'];
version: Version;
constructor(data: string) {
if (!data) throw new Error('Data must not be empty');
const parser = new XMLParser({
ignoreAttributes: false,
removeNSPrefix: true,
attributeNamePrefix: '@_'
});
const parsed = parser.parse(data) as ParsedWRMHeader;
if (!parsed.WRMHEADER) throw new Error('Data is not a valid WRMHEADER');
this.header = parsed.WRMHEADER;
this.version = WRMHeader.fromString(this.header['@_version']);
}
// Narrow an arbitrary version attribute to a supported Version value.
private static fromString(value: string): Version {
if (['4.0.0.0', '4.1.0.0', '4.2.0.0', '4.3.0.0'].includes(value)) {
return value as Version;
}
return 'UNKNOWN';
}
// Rebuild the header as v4.0.0.0 XML using the first key ID; optional
// URL/checksum elements are emitted only when present in the source header.
to_v4_0_0_0(): string {
const [key_ids, la_url, lui_url, ds_id] = this.readAttributes();
if (key_ids.length === 0) throw new Error('No Key IDs available');
const key_id = key_ids[0];
return `<WRMHEADER xmlns="http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader" version="4.0.0.0"><DATA><PROTECTINFO><KEYLEN>16</KEYLEN><ALGID>AESCTR</ALGID></PROTECTINFO><KID>${
key_id.value
}</KID>${la_url ? `<LA_URL>${la_url}</LA_URL>` : ''}${lui_url ? `<LUI_URL>${lui_url}</LUI_URL>` : ''}${ds_id ? `<DS_ID>${ds_id}</DS_ID>` : ''}${
key_id.checksum ? `<CHECKSUM>${key_id.checksum}</CHECKSUM>` : ''
}</DATA></WRMHEADER>`;
}
// Extract [key IDs, LA_URL, LUI_URL, DS_ID] according to the header version.
readAttributes(): ReturnStructure {
const data = this.header.DATA;
if (!data) throw new Error('Not a valid PlayReady Header Record, WRMHEADER/DATA required');
switch (this.version) {
case '4.0.0.0':
return WRMHeader.read_v4(data);
case '4.1.0.0':
case '4.2.0.0':
case '4.3.0.0':
return WRMHeader.read_vX(data);
default:
throw new Error(`Unsupported version: ${this.version}`);
}
}
// v4.0.0.0: single KID as element text, ALGID under PROTECTINFO.
private static read_v4(data: any): ReturnStructure {
const protectInfo = data.PROTECTINFO;
return [[new SignedKeyID(protectInfo.ALGID, data.KID, data.CHECKSUM)], data.LA_URL || null, data.LUI_URL || null, data.DS_ID || null];
}
// v4.1+: the KID is carried as attributes under PROTECTINFO/KIDS/KID.
// NOTE(review): only a single KID element is read here; 4.2/4.3 headers may
// carry multiple <KID> entries — confirm whether all should be collected.
private static read_vX(data: any): ReturnStructure {
const protectInfo = data.PROTECTINFO;
const signedKeyID: SignedKeyID | undefined = protectInfo.KIDS.KID
? new SignedKeyID(protectInfo.KIDS.KID['@_ALGID'] || '', protectInfo.KIDS.KID['@_VALUE'], protectInfo.KIDS.KID['@_CHECKSUM'])
: undefined;
return [signedKeyID ? [signedKeyID] : [], data.LA_URL || null, data.LUI_URL || null, data.DS_ID || null];
}
}

View file

@ -1,45 +0,0 @@
import BN from 'bn.js';
import { ec as EC } from 'elliptic';
import ECCKey from './ecc_key';
import ElGamal, { Point } from './elgamal';
// Ephemeral symmetric key material derived from a freshly generated P-256
// point: the big-endian bytes of the X coordinate supply an AES IV
// (first 16 bytes) and an AES key (next 16 bytes).
export default class XmlKey {
private _sharedPoint: ECCKey;
public sharedKeyX: BN;
public sharedKeyY: BN;
public _shared_key_x_bytes: Uint8Array;
public aesIv: Uint8Array;
public aesKey: Uint8Array;
constructor() {
this._sharedPoint = ECCKey.generate();
this.sharedKeyX = this._sharedPoint.keyPair.getPublic().getX();
this.sharedKeyY = this._sharedPoint.keyPair.getPublic().getY();
// NOTE(review): ElGamal.toBytes returns a minimal-length big-endian buffer,
// not a fixed 32 bytes — if X has leading zero bytes the iv/key slices
// below shift or come up short; confirm whether fixed-width encoding is needed.
this._shared_key_x_bytes = ElGamal.toBytes(this.sharedKeyX);
this.aesIv = this._shared_key_x_bytes.subarray(0, 16);
this.aesKey = this._shared_key_x_bytes.subarray(16, 32);
}
// Re-create the shared point on the supplied curve.
getPoint(curve: EC): Point {
return curve.curve.point(this.sharedKeyX, this.sharedKeyY);
}
}
// Make it more undetectable (not working right now)
// import { randomBytes } from 'crypto'
// export default class XmlKey {
// public aesIv: Uint8Array
// public aesKey: Uint8Array
// public bytes: Uint8Array
// constructor() {
// this.aesIv = randomBytes(16)
// this.aesKey = randomBytes(16)
// this.bytes = new Uint8Array([...this.aesIv, ...this.aesKey])
// console.log('XML key (AES/CBC)')
// console.log('iv:', Buffer.from(this.aesIv).toString('hex'))
// console.log('key:', Buffer.from(this.aesKey).toString('hex'))
// console.log('bytes:', this.bytes)
// }
// }

View file

@ -1,228 +0,0 @@
import { Parser } from 'binary-parser';
// Summary structure produced by XmrUtil.parse: the fields of interest pulled
// out of an XMR license's FTLV object tree. Optional members are present only
// when the corresponding object appeared in the blob.
type ParsedLicense = {
version: number;
// Rights ID, hex-encoded.
rights: string;
// Total size of the raw blob in bytes.
length: number;
license: {
length: number;
signature?: {
length: number;
type: string;
// Signature bytes, hex-encoded.
value: string;
};
global_container?: {
revocationInfo?: {
version: number;
};
securityLevel?: {
level: number;
};
};
keyMaterial?: {
contentKey?: {
// Key ID as a hex string (byte order normalized by XmrUtil.fixUUID).
kid: string;
keyType: number;
ciphertype: number;
length: number;
// Encrypted content-key bytes.
value: Buffer;
};
encryptionKey?: {
curve: number;
length: number;
value: string;
};
auxKeys?: {
count: number;
value: {
location: number;
value: string;
};
};
};
};
};
// binary-parser layouts for the pieces of an XMR license blob.
export class XMRLicenseStructsV2 {
// Content-key object: 16 raw KID bytes, key/cipher type tags, length-prefixed value.
static CONTENT_KEY = new Parser().buffer('kid', { length: 16 }).uint16('keytype').uint16('ciphertype').uint16('length').buffer('value', {
length: 'length'
});
// ECC public-key object: curve tag plus length-prefixed key bytes.
static ECC_KEY = new Parser().uint16('curve').uint16('length').buffer('value', {
length: 'length'
});
// Generic Flags/Type/Length/Value container; the length field covers the
// 8-byte header, so the value payload is (length - 8) bytes.
static FTLV = new Parser()
.uint16('flags')
.uint16('type')
.uint32('length')
.buffer('value', {
length: function () {
return (this as any).length - 8;
}
});
// One auxiliary key entry: location id plus 16 key bytes.
static AUXILIARY_LOCATIONS = new Parser().uint32('location').buffer('value', { length: 16 });
static AUXILIARY_KEY_OBJECT = new Parser().uint16('count').array('locations', {
length: 'count',
type: XMRLicenseStructsV2.AUXILIARY_LOCATIONS
});
// Signature object: type tag and length-prefixed signature bytes.
static SIGNATURE = new Parser().uint16('type').uint16('siglength').buffer('signature', {
length: 'siglength'
});
// Outer blob: "XMR\0" magic, version, 16-byte rights id, then the outer FTLV container.
static XMR = new Parser().string('constant', { length: 4, assert: 'XMR\x00' }).int32('version').buffer('rightsid', { length: 16 }).nest('data', {
type: XMRLicenseStructsV2.FTLV
});
}
// Type tags of FTLV objects inside an XMR license (the subset handled by XmrUtil).
enum XMRTYPE {
XMR_OUTER_CONTAINER = 0x0001,
XMR_GLOBAL_POLICY_CONTAINER = 0x0002,
XMR_PLAYBACK_POLICY_CONTAINER = 0x0004,
XMR_KEY_MATERIAL_CONTAINER = 0x0009,
XMR_RIGHTS_SETTINGS = 0x000d,
XMR_EMBEDDED_LICENSE_SETTINGS = 0x0033,
XMR_REVOCATION_INFORMATION_VERSION = 0x0032,
XMR_SECURITY_LEVEL = 0x0034,
XMR_CONTENT_KEY_OBJECT = 0x000a,
XMR_ECC_KEY_OBJECT = 0x002a,
XMR_SIGNATURE_OBJECT = 0x000b,
XMR_OUTPUT_LEVEL_RESTRICTION = 0x0005,
XMR_AUXILIARY_KEY_OBJECT = 0x0051,
XMR_EXPIRATION_RESTRICTION = 0x0012,
XMR_ISSUE_DATE = 0x0013,
XMR_EXPLICIT_ANALOG_CONTAINER = 0x0007
}
// Parser for XMR license blobs: walks the FTLV object tree and extracts the
// signature, global-policy and key-material fields into a ParsedLicense.
export class XmrUtil {
// Raw license blob as handed to parse().
public data: Buffer;
// Extracted summary of the blob.
public license: ParsedLicense;
constructor(data: Buffer, license: ParsedLicense) {
this.data = data;
this.license = license;
}
// Parse a raw XMR blob into a ParsedLicense summary wrapped in an XmrUtil.
static parse(license: Buffer) {
const xmr = XMRLicenseStructsV2.XMR.parse(license);
const parsed_license: ParsedLicense = {
version: xmr.version,
rights: Buffer.from(xmr.rightsid).toString('hex'),
length: license.length,
license: {
length: xmr.data.length
}
};
const container = parsed_license.license;
const data = xmr.data;
let pos = 0;
// Walk the top-level FTLV objects, advancing by each object's length field.
// NOTE(review): the `- 16` loop bound and advancing by `length` assume the
// length field covers the whole FTLV including its header — confirm the
// bound's intent against the XMR layout.
while (pos < data.length - 16) {
const value = XMRLicenseStructsV2.FTLV.parse(data.value.slice(pos));
// XMR_SIGNATURE_OBJECT
if (value.type === XMRTYPE.XMR_SIGNATURE_OBJECT) {
const signature = XMRLicenseStructsV2.SIGNATURE.parse(value.value);
container.signature = {
length: value.length,
type: signature.type,
value: Buffer.from(signature.signature).toString('hex')
};
}
// XMRTYPE.XMR_GLOBAL_POLICY_CONTAINER
if (value.type === XMRTYPE.XMR_GLOBAL_POLICY_CONTAINER) {
container.global_container = {};
let index = 0;
// Nested FTLV walk over the policy container's payload.
while (index < value.length - 16) {
const data = XMRLicenseStructsV2.FTLV.parse(value.value.slice(index));
// XMRTYPE.XMR_REVOCATION_INFORMATION_VERSION
if (data.type === XMRTYPE.XMR_REVOCATION_INFORMATION_VERSION) {
container.global_container.revocationInfo = {
version: data.value.readUInt32BE(0)
};
}
// XMRTYPE.XMR_SECURITY_LEVEL
if (data.type === XMRTYPE.XMR_SECURITY_LEVEL) {
container.global_container.securityLevel = {
level: data.value.readUInt16BE(0)
};
}
index += data.length;
}
}
// XMRTYPE.XMR_KEY_MATERIAL_CONTAINER
if (value.type === XMRTYPE.XMR_KEY_MATERIAL_CONTAINER) {
container.keyMaterial = {};
let index = 0;
// Nested FTLV walk over the key-material container's payload.
while (index < value.length - 16) {
const data = XMRLicenseStructsV2.FTLV.parse(value.value.slice(index));
// XMRTYPE.XMR_CONTENT_KEY_OBJECT
if (data.type === XMRTYPE.XMR_CONTENT_KEY_OBJECT) {
const content_key = XMRLicenseStructsV2.CONTENT_KEY.parse(data.value);
container.keyMaterial.contentKey = {
kid: XmrUtil.fixUUID(content_key.kid).toString('hex'),
keyType: content_key.keytype,
ciphertype: content_key.ciphertype,
length: content_key.length,
value: content_key.value
};
}
// XMRTYPE.XMR_ECC_KEY_OBJECT
if (data.type === XMRTYPE.XMR_ECC_KEY_OBJECT) {
const ecc_key = XMRLicenseStructsV2.ECC_KEY.parse(data.value);
container.keyMaterial.encryptionKey = {
curve: ecc_key.curve,
length: ecc_key.length,
value: Buffer.from(ecc_key.value).toString('hex')
};
}
// XMRTYPE.XMR_AUXILIARY_KEY_OBJECT
if (data.type === XMRTYPE.XMR_AUXILIARY_KEY_OBJECT) {
const aux_keys = XMRLicenseStructsV2.AUXILIARY_KEY_OBJECT.parse(data.value);
container.keyMaterial.auxKeys = {
count: aux_keys.count,
value: aux_keys.locations.map((a: any) => {
return {
location: a.location,
value: Buffer.from(a.value).toString('hex')
};
})
};
}
index += data.length;
}
}
pos += value.length;
}
return new XmrUtil(license, parsed_license);
}
// Normalize a GUID's byte order: the first three fields are stored
// little-endian and are reversed here; the last 8 bytes are kept as-is.
static fixUUID(data: Buffer): Buffer {
return Buffer.concat([
Buffer.from(data.subarray(0, 4).reverse()),
Buffer.from(data.subarray(4, 6).reverse()),
Buffer.from(data.subarray(6, 8).reverse()),
data.subarray(8, 16)
]);
}
}

View file

@ -1,113 +0,0 @@
// Modified version of https://github.com/Frooastside/node-widevine
import crypto from 'crypto';
export class AES_CMAC {
private readonly BLOCK_SIZE = 16;
private readonly XOR_RIGHT = Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x87]);
private readonly EMPTY_BLOCK_SIZE_BUFFER = Buffer.alloc(this.BLOCK_SIZE) as Buffer;
private _key: Buffer;
private _subkeys: { first: Buffer; second: Buffer };
public constructor(key: Buffer) {
if (![16, 24, 32].includes(key.length)) {
throw new Error('Key size must be 128, 192, or 256 bits.');
}
this._key = key;
this._subkeys = this._generateSubkeys();
}
public calculate(message: Buffer): Buffer {
const blockCount = this._getBlockCount(message);
let x = this.EMPTY_BLOCK_SIZE_BUFFER;
let y;
for (let i = 0; i < blockCount - 1; i++) {
const from = i * this.BLOCK_SIZE;
const block = message.subarray(from, from + this.BLOCK_SIZE);
y = this._xor(x, block);
x = this._aes(y);
}
y = this._xor(x, this._getLastBlock(message));
x = this._aes(y);
return x;
}
private _generateSubkeys(): { first: Buffer; second: Buffer } {
const l = this._aes(this.EMPTY_BLOCK_SIZE_BUFFER);
let first = this._bitShiftLeft(l);
if (l[0] & 0x80) {
first = this._xor(first, this.XOR_RIGHT);
}
let second = this._bitShiftLeft(first);
if (first[0] & 0x80) {
second = this._xor(second, this.XOR_RIGHT);
}
return { first: first, second: second };
}
private _getBlockCount(message: Buffer): number {
const blockCount = Math.ceil(message.length / this.BLOCK_SIZE);
return blockCount === 0 ? 1 : blockCount;
}
private _aes(message: Buffer): Buffer {
const cipher = crypto.createCipheriv(`aes-${this._key.length * 8}-cbc`, this._key, Buffer.alloc(this.BLOCK_SIZE));
const result = cipher.update(message).subarray(0, 16);
cipher.destroy();
return result;
}
private _getLastBlock(message: Buffer): Buffer {
const blockCount = this._getBlockCount(message);
const paddedBlock = this._padding(message, blockCount - 1);
let complete = false;
if (message.length > 0) {
complete = message.length % this.BLOCK_SIZE === 0;
}
const key = complete ? this._subkeys.first : this._subkeys.second;
return this._xor(paddedBlock, key);
}
private _padding(message: Buffer, blockIndex: number): Buffer {
const block = Buffer.alloc(this.BLOCK_SIZE);
const from = blockIndex * this.BLOCK_SIZE;
const slice = message.subarray(from, from + this.BLOCK_SIZE);
block.set(slice);
if (slice.length !== this.BLOCK_SIZE) {
block[slice.length] = 0x80;
}
return block;
}
private _bitShiftLeft(input: Buffer): Buffer {
const output = Buffer.alloc(input.length);
let overflow = 0;
for (let i = input.length - 1; i >= 0; i--) {
output[i] = (input[i] << 1) | overflow;
overflow = input[i] & 0x80 ? 1 : 0;
}
return output;
}
private _xor(a: Buffer, b: Buffer): Buffer {
const length = Math.min(a.length, b.length);
const output = Buffer.alloc(length);
for (let i = 0; i < length; i++) {
output[i] = a[i] ^ b[i];
}
return output;
}
}

View file

@ -1,303 +0,0 @@
// Modified version of https://github.com/Frooastside/node-widevine
import { AES_CMAC } from './cmac';
import forge from 'node-forge';
import {
ClientIdentification,
ClientIdentificationSchema,
DrmCertificateSchema,
EncryptedClientIdentification,
EncryptedClientIdentificationSchema,
LicenseRequest,
LicenseRequest_ContentIdentification_WidevinePsshDataSchema,
LicenseRequest_ContentIdentificationSchema,
LicenseRequest_RequestType,
LicenseRequestSchema,
LicenseSchema,
LicenseType,
ProtocolVersion,
SignedDrmCertificate,
SignedDrmCertificateSchema,
SignedMessage,
SignedMessage_MessageType,
SignedMessageSchema,
WidevinePsshData,
WidevinePsshDataSchema
} from './license_protocol_pb3';
import { create, fromBinary, toBinary } from '@bufbuild/protobuf';
const WIDEVINE_SYSTEM_ID = new Uint8Array([0xed, 0xef, 0x8b, 0xa9, 0x79, 0xd6, 0x4a, 0xce, 0xa3, 0xc8, 0x27, 0xdc, 0xd5, 0x1d, 0x21, 0xed]);
const WIDEVINE_ROOT_PUBLIC_KEY = new Uint8Array([
0x30, 0x82, 0x01, 0x8a, 0x02, 0x82, 0x01, 0x81, 0x00, 0xb4, 0xfe, 0x39, 0xc3, 0x65, 0x90, 0x03, 0xdb, 0x3c, 0x11, 0x97, 0x09, 0xe8, 0x68, 0xcd, 0xf2, 0xc3, 0x5e, 0x9b, 0xf2,
0xe7, 0x4d, 0x23, 0xb1, 0x10, 0xdb, 0x87, 0x65, 0xdf, 0xdc, 0xfb, 0x9f, 0x35, 0xa0, 0x57, 0x03, 0x53, 0x4c, 0xf6, 0x6d, 0x35, 0x7d, 0xa6, 0x78, 0xdb, 0xb3, 0x36, 0xd2, 0x3f,
0x9c, 0x40, 0xa9, 0x95, 0x26, 0x72, 0x7f, 0xb8, 0xbe, 0x66, 0xdf, 0xc5, 0x21, 0x98, 0x78, 0x15, 0x16, 0x68, 0x5d, 0x2f, 0x46, 0x0e, 0x43, 0xcb, 0x8a, 0x84, 0x39, 0xab, 0xfb,
0xb0, 0x35, 0x80, 0x22, 0xbe, 0x34, 0x23, 0x8b, 0xab, 0x53, 0x5b, 0x72, 0xec, 0x4b, 0xb5, 0x48, 0x69, 0x53, 0x3e, 0x47, 0x5f, 0xfd, 0x09, 0xfd, 0xa7, 0x76, 0x13, 0x8f, 0x0f,
0x92, 0xd6, 0x4c, 0xdf, 0xae, 0x76, 0xa9, 0xba, 0xd9, 0x22, 0x10, 0xa9, 0x9d, 0x71, 0x45, 0xd6, 0xd7, 0xe1, 0x19, 0x25, 0x85, 0x9c, 0x53, 0x9a, 0x97, 0xeb, 0x84, 0xd7, 0xcc,
0xa8, 0x88, 0x82, 0x20, 0x70, 0x26, 0x20, 0xfd, 0x7e, 0x40, 0x50, 0x27, 0xe2, 0x25, 0x93, 0x6f, 0xbc, 0x3e, 0x72, 0xa0, 0xfa, 0xc1, 0xbd, 0x29, 0xb4, 0x4d, 0x82, 0x5c, 0xc1,
0xb4, 0xcb, 0x9c, 0x72, 0x7e, 0xb0, 0xe9, 0x8a, 0x17, 0x3e, 0x19, 0x63, 0xfc, 0xfd, 0x82, 0x48, 0x2b, 0xb7, 0xb2, 0x33, 0xb9, 0x7d, 0xec, 0x4b, 0xba, 0x89, 0x1f, 0x27, 0xb8,
0x9b, 0x88, 0x48, 0x84, 0xaa, 0x18, 0x92, 0x0e, 0x65, 0xf5, 0xc8, 0x6c, 0x11, 0xff, 0x6b, 0x36, 0xe4, 0x74, 0x34, 0xca, 0x8c, 0x33, 0xb1, 0xf9, 0xb8, 0x8e, 0xb4, 0xe6, 0x12,
0xe0, 0x02, 0x98, 0x79, 0x52, 0x5e, 0x45, 0x33, 0xff, 0x11, 0xdc, 0xeb, 0xc3, 0x53, 0xba, 0x7c, 0x60, 0x1a, 0x11, 0x3d, 0x00, 0xfb, 0xd2, 0xb7, 0xaa, 0x30, 0xfa, 0x4f, 0x5e,
0x48, 0x77, 0x5b, 0x17, 0xdc, 0x75, 0xef, 0x6f, 0xd2, 0x19, 0x6d, 0xdc, 0xbe, 0x7f, 0xb0, 0x78, 0x8f, 0xdc, 0x82, 0x60, 0x4c, 0xbf, 0xe4, 0x29, 0x06, 0x5e, 0x69, 0x8c, 0x39,
0x13, 0xad, 0x14, 0x25, 0xed, 0x19, 0xb2, 0xf2, 0x9f, 0x01, 0x82, 0x0d, 0x56, 0x44, 0x88, 0xc8, 0x35, 0xec, 0x1f, 0x11, 0xb3, 0x24, 0xe0, 0x59, 0x0d, 0x37, 0xe4, 0x47, 0x3c,
0xea, 0x4b, 0x7f, 0x97, 0x31, 0x1c, 0x81, 0x7c, 0x94, 0x8a, 0x4c, 0x7d, 0x68, 0x15, 0x84, 0xff, 0xa5, 0x08, 0xfd, 0x18, 0xe7, 0xe7, 0x2b, 0xe4, 0x47, 0x27, 0x12, 0x11, 0xb8,
0x23, 0xec, 0x58, 0x93, 0x3c, 0xac, 0x12, 0xd2, 0x88, 0x6d, 0x41, 0x3d, 0xc5, 0xfe, 0x1c, 0xdc, 0xb9, 0xf8, 0xd4, 0x51, 0x3e, 0x07, 0xe5, 0x03, 0x6f, 0xa7, 0x12, 0xe8, 0x12,
0xf7, 0xb5, 0xce, 0xa6, 0x96, 0x55, 0x3f, 0x78, 0xb4, 0x64, 0x82, 0x50, 0xd2, 0x33, 0x5f, 0x91, 0x02, 0x03, 0x01, 0x00, 0x01
]);
// Raw protobuf bytes (field 1 = varint 4); presumably the minimal SignedMessage
// that asks the license server for its service certificate — verify against license_protocol.proto.
export const SERVICE_CERTIFICATE_CHALLENGE = new Uint8Array([0x08, 0x04]);
const COMMON_SERVICE_CERTIFICATE = new Uint8Array([
0x08, 0x05, 0x12, 0xc7, 0x05, 0x0a, 0xc1, 0x02, 0x08, 0x03, 0x12, 0x10, 0x17, 0x05, 0xb9, 0x17, 0xcc, 0x12, 0x04, 0x86, 0x8b, 0x06, 0x33, 0x3a, 0x2f, 0x77, 0x2a, 0x8c, 0x18,
0x82, 0xb4, 0x82, 0x92, 0x05, 0x22, 0x8e, 0x02, 0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, 0x00, 0x99, 0xed, 0x5b, 0x3b, 0x32, 0x7d, 0xab, 0x5e, 0x24, 0xef, 0xc3, 0xb6,
0x2a, 0x95, 0xb5, 0x98, 0x52, 0x0a, 0xd5, 0xbc, 0xcb, 0x37, 0x50, 0x3e, 0x06, 0x45, 0xb8, 0x14, 0xd8, 0x76, 0xb8, 0xdf, 0x40, 0x51, 0x04, 0x41, 0xad, 0x8c, 0xe3, 0xad, 0xb1,
0x1b, 0xb8, 0x8c, 0x4e, 0x72, 0x5a, 0x5e, 0x4a, 0x9e, 0x07, 0x95, 0x29, 0x1d, 0x58, 0x58, 0x40, 0x23, 0xa7, 0xe1, 0xaf, 0x0e, 0x38, 0xa9, 0x12, 0x79, 0x39, 0x30, 0x08, 0x61,
0x0b, 0x6f, 0x15, 0x8c, 0x87, 0x8c, 0x7e, 0x21, 0xbf, 0xfb, 0xfe, 0xea, 0x77, 0xe1, 0x01, 0x9e, 0x1e, 0x57, 0x81, 0xe8, 0xa4, 0x5f, 0x46, 0x26, 0x3d, 0x14, 0xe6, 0x0e, 0x80,
0x58, 0xa8, 0x60, 0x7a, 0xdc, 0xe0, 0x4f, 0xac, 0x84, 0x57, 0xb1, 0x37, 0xa8, 0xd6, 0x7c, 0xcd, 0xeb, 0x33, 0x70, 0x5d, 0x98, 0x3a, 0x21, 0xfb, 0x4e, 0xec, 0xbd, 0x4a, 0x10,
0xca, 0x47, 0x49, 0x0c, 0xa4, 0x7e, 0xaa, 0x5d, 0x43, 0x82, 0x18, 0xdd, 0xba, 0xf1, 0xca, 0xde, 0x33, 0x92, 0xf1, 0x3d, 0x6f, 0xfb, 0x64, 0x42, 0xfd, 0x31, 0xe1, 0xbf, 0x40,
0xb0, 0xc6, 0x04, 0xd1, 0xc4, 0xba, 0x4c, 0x95, 0x20, 0xa4, 0xbf, 0x97, 0xee, 0xbd, 0x60, 0x92, 0x9a, 0xfc, 0xee, 0xf5, 0x5b, 0xba, 0xf5, 0x64, 0xe2, 0xd0, 0xe7, 0x6c, 0xd7,
0xc5, 0x5c, 0x73, 0xa0, 0x82, 0xb9, 0x96, 0x12, 0x0b, 0x83, 0x59, 0xed, 0xce, 0x24, 0x70, 0x70, 0x82, 0x68, 0x0d, 0x6f, 0x67, 0xc6, 0xd8, 0x2c, 0x4a, 0xc5, 0xf3, 0x13, 0x44,
0x90, 0xa7, 0x4e, 0xec, 0x37, 0xaf, 0x4b, 0x2f, 0x01, 0x0c, 0x59, 0xe8, 0x28, 0x43, 0xe2, 0x58, 0x2f, 0x0b, 0x6b, 0x9f, 0x5d, 0xb0, 0xfc, 0x5e, 0x6e, 0xdf, 0x64, 0xfb, 0xd3,
0x08, 0xb4, 0x71, 0x1b, 0xcf, 0x12, 0x50, 0x01, 0x9c, 0x9f, 0x5a, 0x09, 0x02, 0x03, 0x01, 0x00, 0x01, 0x3a, 0x14, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x2e, 0x77, 0x69,
0x64, 0x65, 0x76, 0x69, 0x6e, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x12, 0x80, 0x03, 0xae, 0x34, 0x73, 0x14, 0xb5, 0xa8, 0x35, 0x29, 0x7f, 0x27, 0x13, 0x88, 0xfb, 0x7b, 0xb8, 0xcb,
0x52, 0x77, 0xd2, 0x49, 0x82, 0x3c, 0xdd, 0xd1, 0xda, 0x30, 0xb9, 0x33, 0x39, 0x51, 0x1e, 0xb3, 0xcc, 0xbd, 0xea, 0x04, 0xb9, 0x44, 0xb9, 0x27, 0xc1, 0x21, 0x34, 0x6e, 0xfd,
0xbd, 0xea, 0xc9, 0xd4, 0x13, 0x91, 0x7e, 0x6e, 0xc1, 0x76, 0xa1, 0x04, 0x38, 0x46, 0x0a, 0x50, 0x3b, 0xc1, 0x95, 0x2b, 0x9b, 0xa4, 0xe4, 0xce, 0x0f, 0xc4, 0xbf, 0xc2, 0x0a,
0x98, 0x08, 0xaa, 0xaf, 0x4b, 0xfc, 0xd1, 0x9c, 0x1d, 0xcf, 0xcd, 0xf5, 0x74, 0xcc, 0xac, 0x28, 0xd1, 0xb4, 0x10, 0x41, 0x6c, 0xf9, 0xde, 0x88, 0x04, 0x30, 0x1c, 0xbd, 0xb3,
0x34, 0xca, 0xfc, 0xd0, 0xd4, 0x09, 0x78, 0x42, 0x3a, 0x64, 0x2e, 0x54, 0x61, 0x3d, 0xf0, 0xaf, 0xcf, 0x96, 0xca, 0x4a, 0x92, 0x49, 0xd8, 0x55, 0xe4, 0x2b, 0x3a, 0x70, 0x3e,
0xf1, 0x76, 0x7f, 0x6a, 0x9b, 0xd3, 0x6d, 0x6b, 0xf8, 0x2b, 0xe7, 0x6b, 0xbf, 0x0c, 0xba, 0x4f, 0xde, 0x59, 0xd2, 0xab, 0xcc, 0x76, 0xfe, 0xb6, 0x42, 0x47, 0xb8, 0x5c, 0x43,
0x1f, 0xbc, 0xa5, 0x22, 0x66, 0xb6, 0x19, 0xfc, 0x36, 0x97, 0x95, 0x43, 0xfc, 0xa9, 0xcb, 0xbd, 0xbb, 0xfa, 0xfa, 0x0e, 0x1a, 0x55, 0xe7, 0x55, 0xa3, 0xc7, 0xbc, 0xe6, 0x55,
0xf9, 0x64, 0x6f, 0x58, 0x2a, 0xb9, 0xcf, 0x70, 0xaa, 0x08, 0xb9, 0x79, 0xf8, 0x67, 0xf6, 0x3a, 0x0b, 0x2b, 0x7f, 0xdb, 0x36, 0x2c, 0x5b, 0xc4, 0xec, 0xd5, 0x55, 0xd8, 0x5b,
0xca, 0xa9, 0xc5, 0x93, 0xc3, 0x83, 0xc8, 0x57, 0xd4, 0x9d, 0xaa, 0xb7, 0x7e, 0x40, 0xb7, 0x85, 0x1d, 0xdf, 0xd2, 0x49, 0x98, 0x80, 0x8e, 0x35, 0xb2, 0x58, 0xe7, 0x5d, 0x78,
0xea, 0xc0, 0xca, 0x16, 0xf7, 0x04, 0x73, 0x04, 0xc2, 0x0d, 0x93, 0xed, 0xe4, 0xe8, 0xff, 0x1c, 0x6f, 0x17, 0xe6, 0x24, 0x3e, 0x3f, 0x3d, 0xa8, 0xfc, 0x17, 0x09, 0x87, 0x0e,
0xc4, 0x5f, 0xba, 0x82, 0x3a, 0x26, 0x3f, 0x0c, 0xef, 0xa1, 0xf7, 0x09, 0x3b, 0x19, 0x09, 0x92, 0x83, 0x26, 0x33, 0x37, 0x05, 0x04, 0x3a, 0x29, 0xbd, 0xa6, 0xf9, 0xb4, 0x34,
0x2c, 0xc8, 0xdf, 0x54, 0x3c, 0xb1, 0xa1, 0x18, 0x2f, 0x7c, 0x5f, 0xff, 0x33, 0xf1, 0x04, 0x90, 0xfa, 0xca, 0x5b, 0x25, 0x36, 0x0b, 0x76, 0x01, 0x5e, 0x9c, 0x5a, 0x06, 0xab,
0x8e, 0xe0, 0x2f, 0x00, 0xd2, 0xe8, 0xd5, 0x98, 0x61, 0x04, 0xaa, 0xcc, 0x4d, 0xd4, 0x75, 0xfd, 0x96, 0xee, 0x9c, 0xe4, 0xe3, 0x26, 0xf2, 0x1b, 0x83, 0xc7, 0x05, 0x85, 0x77,
0xb3, 0x87, 0x32, 0xcd, 0xda, 0xbc, 0x6a, 0x6b, 0xed, 0x13, 0xfb, 0x0d, 0x49, 0xd3, 0x8a, 0x45, 0xeb, 0x87, 0xa5, 0xf4
]);
/** A single decrypted content key recovered from a Widevine license. */
export type KeyContainer = {
/** Key ID (KID) as a lowercase hex string. */
kid: string;
/** Decrypted AES content key as a lowercase hex string. */
key: string;
};
/** Widevine CDM device credentials used to open a Session. */
export type ContentDecryptionModule = {
/** RSA device private key in PEM form (parsed via forge.pki.privateKeyFromPem). */
privateKey: Buffer;
/** Serialized ClientIdentification protobuf blob identifying the device. */
identifierBlob: Buffer;
};
/**
 * A Widevine license-exchange session bound to a single PSSH box.
 *
 * Typical flow:
 *  1. construct with device credentials ({@link ContentDecryptionModule}) and the raw PSSH,
 *  2. optionally install a service (privacy) certificate,
 *  3. {@link createLicenseRequest} → POST the challenge to the license server,
 *  4. {@link parseLicense} on the response → decrypted {@link KeyContainer}s.
 */
export class Session {
  /** RSA device private key, parsed from the CDM's PEM blob. */
  private _devicePrivateKey: forge.pki.rsa.PrivateKey;
  /** Parsed ClientIdentification protobuf describing the device. */
  private _identifierBlob: ClientIdentification;
  /** Raw PSSH box, including the 32-byte box header / system id prefix. */
  private _pssh: Buffer;
  /** Serialized LicenseRequest; needed later for key derivation and HMAC verification. */
  private _rawLicenseRequest?: Buffer;
  /** Service certificate; when set, the client id is sent encrypted (privacy mode). */
  private _serviceCertificate?: SignedDrmCertificate;

  constructor(contentDecryptionModule: ContentDecryptionModule, pssh: Buffer) {
    this._devicePrivateKey = forge.pki.privateKeyFromPem(contentDecryptionModule.privateKey.toString('binary'));
    this._identifierBlob = fromBinary(ClientIdentificationSchema, contentDecryptionModule.identifierBlob);
    this._pssh = pssh;
  }

  /** Installs the bundled common Widevine service certificate. */
  async setDefaultServiceCertificate() {
    await this.setServiceCertificate(Buffer.from(COMMON_SERVICE_CERTIFICATE));
  }

  /**
   * Extracts the service certificate from a raw SignedMessage (as returned by
   * a license server's certificate request) and installs it.
   * @throws Error if the message carries no payload
   */
  async setServiceCertificateFromMessage(rawSignedMessage: Buffer) {
    const signedMessage: SignedMessage = fromBinary(SignedMessageSchema, rawSignedMessage);
    if (!signedMessage.msg) {
      throw new Error('the service certificate message does not contain a message');
    }
    await this.setServiceCertificate(Buffer.from(signedMessage.msg));
  }

  /**
   * Verifies a serialized SignedDrmCertificate against the Widevine root
   * public key and stores it for use in subsequent license requests.
   * @throws Error if the certificate's signature does not verify
   */
  async setServiceCertificate(serviceCertificate: Buffer) {
    const signedServiceCertificate: SignedDrmCertificate = fromBinary(SignedDrmCertificateSchema, serviceCertificate);
    if (!(await this._verifyServiceCertificate(signedServiceCertificate))) {
      throw new Error('Service certificate is not signed by the Widevine root certificate');
    }
    this._serviceCertificate = signedServiceCertificate;
  }

  /**
   * Builds and signs (RSASSA-PSS / SHA-1, salt length 20) a license request
   * for this session's PSSH.
   * @param licenseType requested license type, defaults to STREAMING
   * @param android when true, uses an Android-style request id instead of 16 random bytes
   * @returns serialized SignedMessage challenge to send to the license server
   * @throws Error if the PSSH lacks the Widevine system id or its payload fails to parse
   */
  createLicenseRequest(licenseType: LicenseType = LicenseType.STREAMING, android: boolean = false): Buffer {
    // Bytes 12..27 of a PSSH box hold the 16-byte DRM system id.
    if (!this._pssh.subarray(12, 28).equals(Buffer.from(WIDEVINE_SYSTEM_ID))) {
      throw new Error('the pssh is not an actual pssh');
    }
    const pssh = this._parsePSSH(this._pssh);
    if (!pssh) {
      throw new Error('pssh is invalid');
    }
    const licenseRequest: LicenseRequest = create(LicenseRequestSchema, {
      type: LicenseRequest_RequestType.NEW,
      contentId: create(LicenseRequest_ContentIdentificationSchema, {
        contentIdVariant: {
          case: 'widevinePsshData',
          value: create(LicenseRequest_ContentIdentification_WidevinePsshDataSchema, {
            // Skip the 32-byte PSSH box header; the remainder is the Widevine pssh payload.
            psshData: [this._pssh.subarray(32)],
            licenseType: licenseType,
            requestId: android ? this._generateAndroidIdentifier() : this._generateGenericIdentifier()
          })
        }
      }),
      // Unix time in whole seconds.
      requestTime: BigInt(Date.now()) / BigInt(1000),
      protocolVersion: ProtocolVersion.VERSION_2_1,
      keyControlNonce: Math.floor(Math.random() * 2 ** 31)
    });
    if (this._serviceCertificate) {
      // Privacy mode: the client id is AES-encrypted under the service certificate.
      const encryptedClientIdentification = this._encryptClientIdentification(this._identifierBlob, this._serviceCertificate);
      licenseRequest.encryptedClientId = encryptedClientIdentification;
    } else {
      licenseRequest.clientId = this._identifierBlob;
    }
    // Keep the raw request: parseLicense derives session keys from it.
    this._rawLicenseRequest = Buffer.from(toBinary(LicenseRequestSchema, licenseRequest));
    const pss: forge.pss.PSS = forge.pss.create({ md: forge.md.sha1.create(), mgf: forge.mgf.mgf1.create(forge.md.sha1.create()), saltLength: 20 });
    const md = forge.md.sha1.create();
    md.update(this._rawLicenseRequest.toString('binary'), 'raw');
    const signature = Buffer.from(this._devicePrivateKey.sign(md, pss), 'binary');
    const signedLicenseRequest: SignedMessage = create(SignedMessageSchema, {
      type: SignedMessage_MessageType.LICENSE_REQUEST,
      msg: this._rawLicenseRequest,
      signature: signature
    });
    return Buffer.from(toBinary(SignedMessageSchema, signedLicenseRequest));
  }

  /**
   * Parses a license-server response, verifies its HMAC-SHA256 signature,
   * and decrypts the contained content keys.
   *
   * The session key is RSA-OAEP-decrypted with the device key; encryption and
   * authentication keys are then derived via AES-CMAC over labels mixed with
   * the raw license request ("ENCRYPTION" / "AUTHENTICATION").
   * @param rawLicense serialized SignedMessage returned by the license server
   * @returns the decrypted keys; only containers with type, key and iv are included
   * @throws Error if no request was made first, required fields are missing,
   *         the signature mismatches, or no valid key is present
   */
  parseLicense(rawLicense: Buffer) {
    if (!this._rawLicenseRequest) {
      throw new Error('please request a license first');
    }
    const signedLicense = fromBinary(SignedMessageSchema, rawLicense);
    if (!signedLicense.sessionKey) {
      throw new Error('the license does not contain a session key');
    }
    if (!signedLicense.msg) {
      throw new Error('the license does not contain a message');
    }
    if (!signedLicense.signature) {
      throw new Error('the license does not contain a signature');
    }
    const sessionKey = this._devicePrivateKey.decrypt(Buffer.from(signedLicense.sessionKey).toString('binary'), 'RSA-OAEP', {
      md: forge.md.sha1.create()
    });
    const cmac = new AES_CMAC(Buffer.from(sessionKey, 'binary'));
    // Key-derivation inputs: label || 0x00 || context (raw request) || output length in bits.
    const encKeyBase = Buffer.concat([Buffer.from('ENCRYPTION'), Buffer.from('\x00', 'ascii'), this._rawLicenseRequest, Buffer.from('\x00\x00\x00\x80', 'ascii')]);
    const authKeyBase = Buffer.concat([Buffer.from('AUTHENTICATION'), Buffer.from('\x00', 'ascii'), this._rawLicenseRequest, Buffer.from('\x00\x00\x02\x00', 'ascii')]);
    const encKey = cmac.calculate(Buffer.concat([Buffer.from('\x01'), encKeyBase]));
    const serverKey = Buffer.concat([cmac.calculate(Buffer.concat([Buffer.from('\x01'), authKeyBase])), cmac.calculate(Buffer.concat([Buffer.from('\x02'), authKeyBase]))]);
    /*const clientKey = Buffer.concat([
      cmac.calculate(Buffer.concat([Buffer.from("\x03"), authKeyBase])),
      cmac.calculate(Buffer.concat([Buffer.from("\x04"), authKeyBase]))
    ]);*/
    // Authenticate the license message before trusting its contents.
    const hmac = forge.hmac.create();
    hmac.start(forge.md.sha256.create(), serverKey.toString('binary'));
    hmac.update(Buffer.from(signedLicense.msg).toString('binary'));
    const calculatedSignature = Buffer.from(hmac.digest().data, 'binary');
    if (!calculatedSignature.equals(signedLicense.signature)) {
      throw new Error('signatures do not match');
    }
    const license = fromBinary(LicenseSchema, signedLicense.msg);
    const keyContainers = license.key
      .filter((k) => k.id)
      .map((keyContainer) => {
        if (keyContainer.type && keyContainer.key && keyContainer.iv) {
          const keyId = Buffer.from(keyContainer.id!).toString('hex');
          // Content keys are AES-CBC encrypted under the derived encryption key.
          const decipher = forge.cipher.createDecipher('AES-CBC', encKey.toString('binary'));
          decipher.start({ iv: Buffer.from(keyContainer.iv).toString('binary') });
          decipher.update(forge.util.createBuffer(new Uint8Array(keyContainer.key)));
          decipher.finish();
          const decryptedKey = Buffer.from(decipher.output.data, 'binary');
          const key: KeyContainer = {
            kid: keyId,
            key: decryptedKey.toString('hex')
          };
          return key;
        }
      })
      // Drop containers that were missing type/key/iv so callers get a clean KeyContainer[]
      // (previously the array could contain undefined holes).
      .filter((container): container is KeyContainer => container !== undefined);
    if (keyContainers.length < 1) {
      throw new Error('there was not a single valid key in the response');
    }
    return keyContainers;
  }

  /**
   * Encrypts the client identification blob for privacy mode: a random
   * AES-128-CBC key/iv encrypt the serialized ClientIdentification, and the
   * AES key is wrapped with RSA-OAEP under the service certificate's public key.
   */
  private _encryptClientIdentification(clientIdentification: ClientIdentification, signedServiceCertificate: SignedDrmCertificate): EncryptedClientIdentification {
    if (!signedServiceCertificate.drmCertificate) {
      throw new Error('the service certificate does not contain an actual certificate');
    }
    const serviceCertificate = fromBinary(DrmCertificateSchema, signedServiceCertificate.drmCertificate);
    if (!serviceCertificate.publicKey) {
      throw new Error('the service certificate does not contain a public key');
    }
    const key = forge.random.getBytesSync(16);
    const iv = forge.random.getBytesSync(16);
    const cipher = forge.cipher.createCipher('AES-CBC', key);
    cipher.start({ iv: iv });
    cipher.update(forge.util.createBuffer(toBinary(ClientIdentificationSchema, clientIdentification)));
    cipher.finish();
    const rawEncryptedClientIdentification = Buffer.from(cipher.output.data, 'binary');
    const publicKey = forge.pki.publicKeyFromAsn1(forge.asn1.fromDer(Buffer.from(serviceCertificate.publicKey).toString('binary')));
    const encryptedKey = publicKey.encrypt(key, 'RSA-OAEP', { md: forge.md.sha1.create() });
    const encryptedClientIdentification: EncryptedClientIdentification = create(EncryptedClientIdentificationSchema, {
      encryptedClientId: rawEncryptedClientIdentification,
      encryptedClientIdIv: Buffer.from(iv, 'binary'),
      encryptedPrivacyKey: Buffer.from(encryptedKey, 'binary'),
      providerId: serviceCertificate.providerId,
      serviceCertificateSerialNumber: serviceCertificate.serialNumber
    });
    return encryptedClientIdentification;
  }

  /**
   * Checks that the service certificate is RSASSA-PSS/SHA-1 signed by the
   * Widevine root public key.
   */
  private async _verifyServiceCertificate(signedServiceCertificate: SignedDrmCertificate): Promise<boolean> {
    if (!signedServiceCertificate.drmCertificate) {
      throw new Error('the service certificate does not contain an actual certificate');
    }
    if (!signedServiceCertificate.signature) {
      throw new Error('the service certificate does not contain a signature');
    }
    const publicKey = forge.pki.publicKeyFromAsn1(forge.asn1.fromDer(Buffer.from(WIDEVINE_ROOT_PUBLIC_KEY).toString('binary')));
    const pss: forge.pss.PSS = forge.pss.create({ md: forge.md.sha1.create(), mgf: forge.mgf.mgf1.create(forge.md.sha1.create()), saltLength: 20 });
    const sha1 = forge.md.sha1.create();
    sha1.update(Buffer.from(signedServiceCertificate.drmCertificate).toString('binary'), 'raw');
    return publicKey.verify(sha1.digest().bytes(), Buffer.from(signedServiceCertificate.signature).toString('binary'), pss);
  }

  /** Parses the Widevine payload of the PSSH box (bytes after the 32-byte header), or null on failure. */
  private _parsePSSH(pssh: Buffer): WidevinePsshData | null {
    try {
      return fromBinary(WidevinePsshDataSchema, pssh.subarray(32));
    } catch {
      return null;
    }
  }

  /** Android-style request id: 8 random bytes as hex, then '01' + 14 zero characters (32 ASCII chars total). */
  private _generateAndroidIdentifier(): Buffer {
    return Buffer.from(`${forge.util.bytesToHex(forge.random.getBytesSync(8))}${'01'}${'00000000000000'}`);
  }

  /** Generic request id: 16 random bytes. */
  private _generateGenericIdentifier(): Buffer {
    return Buffer.from(forge.random.getBytesSync(16), 'binary');
  }

  /** The raw PSSH box this session was created with. */
  get pssh(): Buffer {
    return this._pssh;
  }
}

File diff suppressed because one or more lines are too long

View file

@ -1,7 +1,7 @@
{
"name": "multi-downloader-nx",
"short_name": "aniDL",
"version": "5.6.0",
"version": "5.6.9",
"description": "Downloader for Crunchyroll, Hidive, and AnimationDigitalNetwork with CLI and GUI",
"keywords": [
"download",
@ -15,8 +15,8 @@
"gui"
],
"engines": {
"node": ">=18",
"pnpm": ">=7"
"node": ">=22",
"pnpm": ">=10"
},
"author": "AnimeDL <AnimeDL@users.noreply.github.com>",
"contributors": [
@ -40,57 +40,44 @@
},
"license": "MIT",
"dependencies": {
"@bufbuild/protobuf": "^2.9.0",
"binary-parser": "^2.2.1",
"binary-parser-encoder": "^1.5.3",
"bn.js": "^5.2.2",
"cors": "^2.8.5",
"elliptic": "^6.6.1",
"esbuild": "^0.25.10",
"express": "^5.1.0",
"fast-xml-parser": "^5.3.0",
"ffprobe": "^1.1.2",
"fs-extra": "^11.3.2",
"@bufbuild/protobuf": "^2.10.2",
"commander": "^14.0.2",
"express": "^5.2.1",
"iso-639": "^0.2.2",
"leven": "^3.1.0",
"leven": "^4.1.0",
"log4js": "^6.9.1",
"long": "^5.3.2",
"lookpath": "^1.2.3",
"m3u8-parsed": "^2.0.0",
"m3u8-parser": "^7.2.0",
"mediainfo.js": "^0.3.6",
"mpd-parser": "^1.3.1",
"node-forge": "^1.3.1",
"ofetch": "^1.4.1",
"open": "^8.4.2",
"protobufjs": "^7.5.4",
"puppeteer-real-browser": "^1.4.4",
"node-playready": "^1.1.1",
"open": "^11.0.0",
"undici": "^7.16.0",
"widevine": "^1.0.3",
"ws": "^8.18.3",
"yaml": "^2.8.1",
"yargs": "17.7.2"
"yaml": "^2.8.2"
},
"devDependencies": {
"@bufbuild/buf": "^1.58.0",
"@bufbuild/protoc-gen-es": "^2.9.0",
"@eslint/js": "^9.37.0",
"@types/bn.js": "^5.2.0",
"@types/cors": "^2.8.19",
"@types/elliptic": "^6.4.18",
"@types/express": "^5.0.3",
"@types/ffprobe": "^1.1.8",
"@types/fs-extra": "^11.0.4",
"@types/node": "^24.7.1",
"@types/node-forge": "^1.3.14",
"@commitlint/cli": "^20.2.0",
"@commitlint/config-conventional": "^20.2.0",
"@eslint/js": "^9.39.2",
"@types/express": "^5.0.6",
"@types/m3u8-parser": "^7.2.5",
"@types/node": "^25.0.3",
"@types/ws": "^8.18.1",
"@types/yargs": "^17.0.33",
"@typescript-eslint/eslint-plugin": "^8.46.0",
"@typescript-eslint/parser": "^8.46.0",
"@yao-pkg/pkg": "^6.8.0",
"eslint": "^9.37.0",
"@typescript-eslint/eslint-plugin": "^8.50.0",
"@typescript-eslint/parser": "^8.50.0",
"@yao-pkg/pkg": "^6.11.0",
"esbuild": "0.26.0",
"eslint": "^9.39.2",
"eslint-config-prettier": "^10.1.8",
"prettier": "^3.6.2",
"husky": "^9.1.7",
"jiti": "^2.6.1",
"prettier": "^3.7.4",
"removeNPMAbsolutePaths": "^3.0.1",
"ts-node": "^10.9.2",
"typescript": "^5.9.3",
"typescript-eslint": "^8.46.0"
"typescript-eslint": "^8.50.0"
},
"scripts": {
"prestart": "pnpm run tsc test",
@ -100,7 +87,7 @@
"tsc": "ts-node tsc.ts",
"eslint": "npx eslint . --quiet",
"prettier": "npx prettier . --check",
"prettier-fix": "npx prettier . --write",
"prettier:fix": "npx prettier . --write",
"pretest": "pnpm run tsc",
"proto:compile": "protoc --plugin=protoc-gen-ts_proto=.\\node_modules\\.bin\\protoc-gen-ts_proto.cmd --ts_proto_opt=\"esModuleInterop=true\" --ts_proto_opt=\"forceLong=long\" --ts_proto_opt=\"env=node\" --ts_proto_out=. modules/*.proto",
"prebuild-cli": "pnpm run tsc false false",
@ -123,7 +110,12 @@
"build-macos-arm64-gui": "pnpm run prebuild-gui && cd lib && node modules/build macos-arm64 true",
"build-alpine-x64-gui": "pnpm run prebuild-gui && cd lib && node modules/build alpine-x64 true",
"build-alpine-arm64-gui": "pnpm run prebuild-gui && cd lib && node modules/build alpine-arm64 true",
"test-x64": "pnpm run pretest && cd lib && node modules/build windows-x64 && node modules/build macos-x64 && node modules/build linuxstatic-x64 && node modules/build alpine-x64 && node modules/build linuxstatic-armv7",
"test-arm64": "pnpm run pretest && cd lib && node modules/build windows-arm64 && node modules/build macos-arm64 && node modules/build linuxstatic-arm64 && node modules/build alpine-x64"
"test-windows-x64": "pnpm run pretest && cd lib && node modules/build windows-x64",
"test-linux-x64": "pnpm run pretest && cd lib && node modules/build linuxstatic-x64 && node modules/build alpine-x64",
"test-macos-x64": "pnpm run pretest && cd lib && node modules/build macos-x64",
"test-windows-arm64": "pnpm run pretest && cd lib && node modules/build windows-arm64",
"test-linux-arm64": "pnpm run pretest && cd lib && node modules/build linuxstatic-arm64 && node modules/build alpine-x64",
"test-macos-arm64": "pnpm run pretest && cd lib && node modules/build macos-arm64",
"prepare": "husky"
}
}

File diff suppressed because it is too large Load diff

13
tsc.ts
View file

@ -1,7 +1,6 @@
import { ChildProcess, exec } from 'child_process';
import fs from 'fs-extra';
import path from 'path';
import { removeSync, copyFileSync } from 'fs-extra';
import fs from 'fs';
const argv = process.argv.slice(2);
let buildIgnore: string[] = [];
@ -29,7 +28,6 @@ const ignore = [
'*/*\\.tsx?$',
'./fonts*',
'./gui/react*',
'./dev.js$',
'*/node_modules/*',
'./widevine/*',
'./playready/*',
@ -57,15 +55,14 @@ export { ignore };
};
process.stdout.write('Removing lib dir... ');
removeSync('lib');
fs.rmSync('lib', { recursive: true, force: true });
process.stdout.write('✓\nRunning tsc... ');
const tsc = exec('npx tsc');
await waitForProcess(tsc);
if (!isGUI) {
fs.emptyDirSync(path.join('lib', 'gui'));
fs.rmdirSync(path.join('lib', 'gui'));
fs.rmSync(path.join('lib', 'gui'), { recursive: true, force: true });
}
if (!isTest && isGUI) {
@ -97,9 +94,9 @@ export { ignore };
files.forEach((item) => {
const itemPath = path.join(__dirname, 'lib', item.path.replace(__dirname, ''));
if (item.stats.isDirectory()) {
if (!fs.existsSync(itemPath)) fs.mkdirSync(itemPath);
if (!fs.existsSync(itemPath)) fs.mkdirSync(itemPath, { recursive: true });
} else {
copyFileSync(item.path, itemPath);
fs.cpSync(item.path, itemPath, { recursive: true });
}
});

View file

@ -1,7 +1,7 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"module": "CommonJS",
"outDir": "./lib",
"strict": true,
"esModuleInterop": true,
@ -9,6 +9,7 @@
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"downlevelIteration": true,
"removeComments": true,
"jsx": "react"
},
"exclude": ["./videos", "./tsc.ts", "lib/**/*", "gui/react/**/*"]