ChinnaRengamani committed
Commit: 11acfd9
Parent(s): 51589d2

This view is limited to 50 files because it contains too many changes. See raw diff.
- CHANGELOG.md +48 -0
- CONTRIBUTING.md +105 -0
- Dockerfile +58 -0
- LICENSE +21 -0
- README.md +1 -1
- api/index.ts +3 -0
- package.json +55 -0
- public/img/img1.gif +0 -0
- public/index.html +103 -0
- public/robots.txt +5 -0
- render.yaml +10 -0
- src/config/axios.ts +21 -0
- src/config/cors.ts +10 -0
- src/config/errorHandler.ts +11 -0
- src/config/notFoundHandler.ts +8 -0
- src/config/ratelimit.ts +17 -0
- src/controllers/animeAboutInfo.controller.ts +31 -0
- src/controllers/animeCategory.controller.ts +39 -0
- src/controllers/animeEpisodeSrcs.controller.ts +75 -0
- src/controllers/animeEpisodes.controller.ts +31 -0
- src/controllers/animeGenre.controller.ts +37 -0
- src/controllers/animeProducer.controller.ts +37 -0
- src/controllers/animeSearch.controller.ts +57 -0
- src/controllers/animeSearchSuggestion.controller.ts +31 -0
- src/controllers/episodeServers.controller.ts +30 -0
- src/controllers/estimatedSchedule.controller.ts +36 -0
- src/controllers/homePage.controller.ts +18 -0
- src/controllers/index.ts +25 -0
- src/extractors/index.ts +6 -0
- src/extractors/megacloud.ts +245 -0
- src/extractors/rapidcloud.ts +166 -0
- src/extractors/streamsb.ts +83 -0
- src/extractors/streamtape.ts +37 -0
- src/parsers/animeAboutInfo.ts +184 -0
- src/parsers/animeCategory.ts +118 -0
- src/parsers/animeEpisodeSrcs.ts +129 -0
- src/parsers/animeEpisodes.ts +61 -0
- src/parsers/animeGenre.ts +105 -0
- src/parsers/animeProducer.ts +120 -0
- src/parsers/animeSearch.ts +118 -0
- src/parsers/animeSearchSuggestion.ts +77 -0
- src/parsers/episodeServers.ts +75 -0
- src/parsers/estimatedSchedule.ts +67 -0
- src/parsers/homePage.ts +203 -0
- src/parsers/index.ts +25 -0
- src/routes/index.ts +55 -0
- src/server.ts +60 -0
- src/types/anime.ts +113 -0
- src/types/controllers/animeAboutInfo.ts +3 -0
- src/types/controllers/animeCategory.ts +7 -0
CHANGELOG.md
ADDED
@@ -0,0 +1,48 @@
# [1.29.0](https://github.com/ghoshRitesh12/aniwatch-api/compare/v1.28.0...v1.29.0) (2024-03-25)

### Features

* **advancedSearch:** add advanced related constants ([68e4c70](https://github.com/ghoshRitesh12/aniwatch-api/commit/68e4c70dd887805bc2784bcbfabf5328a1ad752a))
* **advancedSearch:** add advanced search feature ([1c02c9c](https://github.com/ghoshRitesh12/aniwatch-api/commit/1c02c9cf4f9c364c57a2f30471e676b5a5e5b5ca))
* **advancedSearch:** add helper types ([71f0905](https://github.com/ghoshRitesh12/aniwatch-api/commit/71f0905115e88a96f59aa4a52d1ce69a793ebe0c))
* **advancedSearch:** add utility methods related to advanced search ([79d0bdf](https://github.com/ghoshRitesh12/aniwatch-api/commit/79d0bdf05f86c5d5411f9473889442000786322f))
* **advancedSearch:** add utility props to search response ([d6f9f0f](https://github.com/ghoshRitesh12/aniwatch-api/commit/d6f9f0f665c9d03b38b88baa8156892b9a32b0af))
* **advancedSearch:** feat: add search filter parsing ([fef106d](https://github.com/ghoshRitesh12/aniwatch-api/commit/fef106da27270dcb86031e511a3cc428e40f41ff))

# [1.28.0](https://github.com/ghoshRitesh12/aniwatch-api/compare/v1.27.1...v1.28.0) (2024-03-12)

### Features

* add json rate limit response; replace `max` with `limit` ([870fae7](https://github.com/ghoshRitesh12/aniwatch-api/commit/870fae700b56cc20010296387e3d9cda8330560c))
* disabled `ratelimit` & `dont_sleep` for personal deployments ([8565619](https://github.com/ghoshRitesh12/aniwatch-api/commit/8565619f3ab2616b7fbeca7681f063730693a82e))
* update api home page ([112e532](https://github.com/ghoshRitesh12/aniwatch-api/commit/112e532331fa3001d263457bff001c201b89d136))

## [1.27.1](https://github.com/ghoshRitesh12/aniwatch-api/compare/v1.27.0...v1.27.1) (2024-03-03)

# [1.27.0](https://github.com/ghoshRitesh12/aniwatch-api/compare/v1.26.0...v1.27.0) (2024-03-03)

### Features

* add explicit interval time for convenience ([a4b08c4](https://github.com/ghoshRitesh12/aniwatch-api/commit/a4b08c435c0ed62c57a1a6a985e3eed25bb82c92))

# [1.26.0](https://github.com/ghoshRitesh12/aniwatch-api/compare/v1.25.0...v1.26.0) (2024-03-03)

### Features

* add axios config for future code reusability ([4782a8d](https://github.com/ghoshRitesh12/aniwatch-api/commit/4782a8dd708ec1f68bf469907024c082d606dc79))
* update rebranded domain name ([a6f99bf](https://github.com/ghoshRitesh12/aniwatch-api/commit/a6f99bf681d27483d6f214c48673b875d3cbf6ab))
CONTRIBUTING.md
ADDED
@@ -0,0 +1,105 @@
# Contributing to aniwatch-api

Thank you for your interest in contributing to aniwatch-api. We appreciate whatever form of contribution you are willing to make. Every contribution counts ✨

## Table of Contents

- [Types of contributions we are looking for](#types-of-contributions-we-are-looking-for)
- [Ground Rules & Expectations](#ground-rules--expectations)
- [How To Contribute](#how-to-contribute)
- [Prerequisites](#prerequisites)
- [Clone the repository](#clone-the-repository)
- [Project Structure](#project-structure)
- [Commit Messages](#commit-messages)

## Types of contributions we are looking for

In short, we welcome any sort of contribution you are willing to make, as each and every contribution counts. We gladly accept contributions such as:

- Documentation improvements: from minor typos to major document overhauls
- Helping others by answering questions in pull requests.
- Fixing known [bugs](https://github.com/ghoshRitesh12/aniwatch-api/issues?q=is%3Aopen).

## Ground Rules & Expectations

Before we begin, here are a few things we anticipate from you and that you should expect from others:

- Be respectful and thoughtful in your conversations around this project. Each person may have their own views and opinions about the project. Try to listen to each other and reach an agreement or compromise.

## How To Contribute

If you'd like to contribute, start by searching through the [issues](https://github.com/ghoshRitesh12/aniwatch-api/issues) and [pull requests](https://github.com/ghoshRitesh12/aniwatch-api/pulls) to see whether someone else has raised a similar idea or question.

If you don't see your idea listed, and you think it fits into the goals of this guide, you may do one of the following:

- **If your contribution is minor,** such as a typo fix or new provider, consider opening a pull request.
- **If your contribution is major,** such as a major refactor, start by opening an issue first. That way, other people can weigh in on the discussion before you do any work.

## Prerequisites

To contribute to this project, you must know the following:

- [NodeJS](https://nodejs.org/)
- [TypeScript](https://www.typescriptlang.org/)
- Web Scraping
  - [Cheerio](https://cheerio.js.org/)
  - [Axios](https://axios-http.com/docs/intro)
  - [CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Selectors)
  - [Browser Dev Tools](https://developer.mozilla.org/en-US/docs/Learn/Common_questions/Tools_and_setup/What_are_browser_developer_tools)

### Clone the repository

1. [Fork the repository](https://github.com/ghoshRitesh12/aniwatch-api/fork)
2. Clone your fork to your local machine using the following command (replace <your_username> with your actual GitHub username)

```bash
git clone https://github.com/<your_username>/aniwatch-api
```

3. Create a new branch <br/>
   Replace \<new-branch-name> with any of the following naming conventions: <br/>
   - `feature/<branch-name>` - for adding new features
   - `bug/<branch-name>` - for fixing known bugs
   - `misc/<branch-name>` - for anything other than bugs or features

```bash
git checkout -b <new-branch-name>
```

### Project Structure

- `src` directory contains all the source code required for this project

  - `controllers` directory contains all the controller logic
  - `types` directory contains all types & interfaces used for this project
  - `parsers` directory contains all the parsing aka scraping logic
  - `routes` directory contains all the routers
  - `utils` directory contains handy utility methods and properties
  - `config` directory contains api configuration related files
  - `extractors` directory contains anime streaming url extractor files
    <br/><br/>

- `test` directory contains all the tests that need to be evaluated

## Commit Messages

When you've made changes to one or more files, you have to commit them. You also need a message for that commit.

We follow [Conventional Commit Messages](https://www.conventionalcommits.org/en/v1.0.0/#summary).

A brief overview:

- `feat`: A feature, possibly improving something already existing
- `fix`: A fix, for example of a bug
- `perf`: Performance related change
- `refactor`: Refactoring a specific section of the codebase
- `style`: Everything related to styling code like whitespaces, tabs, indenting, etc.
- `test`: Everything related to testing
- `docs`: Everything related to documentation
- `chore`: Code maintenance

Examples:

- `docs: fixed typo in readme`
- `feat: added a new category parser`
- `fix: fixed search results bug`
Dockerfile
ADDED
@@ -0,0 +1,58 @@
# build stage for building .ts files
FROM node:20-alpine as build

RUN mkdir /home/app

WORKDIR /home/app

COPY package.json .

RUN npm install --ignore-scripts

COPY . .

RUN npm run build

# prod stage for including only necessary files
FROM node:20-alpine as prod

LABEL org.opencontainers.image.source=https://github.com/ghoshRitesh12/aniwatch-api
LABEL org.opencontainers.image.description="Node.js API for obtaining anime information from aniwatch.to (formerly zoro.to) written in TypeScript, made with Cheerio & Axios"
LABEL org.opencontainers.image.description "Node.js API for obtaining anime information from aniwatch.to (formerly zoro.to) written in TypeScript, made with Cheerio & Axios"
LABEL org.opencontainers.image.licenses=MIT

# create a non-privileged user
RUN addgroup -S aniwatch && adduser -S zoro -G aniwatch

# set secure folder permissions
RUN mkdir -p /app/public /app/dist && chown -R zoro:aniwatch /app

# set non-privileged user
USER zoro

# set working directory
WORKDIR /app

# copy config file for better use of layers
COPY --chown=zoro:aniwatch package.json .

# install dependencies
RUN npm install --omit=dev --ignore-scripts

# copy public folder from build stage to prod
COPY --from=build --chown=zoro:aniwatch /home/app/public /app/public

# copy dist folder from build stage to prod
COPY --from=build --chown=zoro:aniwatch /home/app/dist /app/dist

HEALTHCHECK --interval=30s --timeout=3s --start-period=5s CMD [ "npm", "run", "healthcheck" ]

ENV NODE_ENV=production
ENV PORT=4000

# exposed port
EXPOSE 4000

CMD [ "node", "dist/src/server.js" ]

# exit
LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Ritesh Ghosh

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md
CHANGED
@@ -7,4 +7,4 @@ sdk: docker
 pinned: false
 ---

-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
api/index.ts
ADDED
@@ -0,0 +1,3 @@
import app from "../src/server.js";

export default app;
package.json
ADDED
@@ -0,0 +1,55 @@
{
  "name": "aniwatch-api",
  "version": "1.29.0",
  "description": "Node.js API for obtaining anime information from hianime.to (formerly aniwatch.to) written in TypeScript, made with Cheerio & Axios",
  "main": "src/server.ts",
  "type": "module",
  "scripts": {
    "start": "tsx src/server.ts",
    "dev": "tsx watch src/server.ts",
    "build": "tsc -p tsconfig.json",
    "vercel-build": "echo \"Hello\"",
    "prepare": "husky install",
    "test": "vitest run --config vitest.config.ts",
    "healthcheck": "curl -f http://localhost:4000/health"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/ghoshRitesh12/aniwatch-api.git"
  },
  "bugs": {
    "url": "https://github.com/ghoshRitesh12/aniwatch-api/issues"
  },
  "homepage": "https://github.com/ghoshRitesh12/aniwatch-api#readme",
  "keywords": [
    "anime",
    "weeb",
    "hianime",
    "scraper"
  ],
  "author": "https://github.com/ghoshRitesh12",
  "license": "MIT",
  "dependencies": {
    "axios": "^1.6.5",
    "cheerio": "1.0.0-rc.12",
    "cors": "^2.8.5",
    "crypto-js": "^4.2.0",
    "dotenv": "^16.3.1",
    "express": "^4.18.2",
    "express-rate-limit": "^7.1.5",
    "http-errors": "^2.0.0",
    "morgan": "^1.10.0"
  },
  "devDependencies": {
    "@types/cors": "^2.8.17",
    "@types/crypto-js": "^4.2.1",
    "@types/express": "^4.17.21",
    "@types/http-errors": "^2.0.4",
    "@types/morgan": "^1.9.9",
    "@types/node": "^20.11.5",
    "husky": "^8.0.3",
    "tsx": "^4.7.0",
    "typescript": "^5.3.3",
    "vitest": "^1.2.1"
  }
}
public/img/img1.gif
ADDED
public/index.html
ADDED
@@ -0,0 +1,103 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="utf-8">
  <title>Aniwatch API</title>
  <meta name="content-language" content="en">
  <meta http-equiv="Content-Security-Policy"
    content="default-src 'self' https://api-aniwatch.onrender.com; style-src 'unsafe-inline'; img-src 'self' https://raw.githubusercontent.com">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta name="title" content="Aniwatch API">
  <link rel="canonical" href="https://api-aniwatch.onrender.com">
  <meta name="robots" content="index, follow, max-image-preview:large, max-snippet:-1, max-video-preview:-1">
  <meta property="og:image"
    content="https://raw.githubusercontent.com/ghoshRitesh12/aniwatch-api/main/public/img/img1.gif">
  <meta property="og:url" content="https://api-aniwatch.onrender.com">
  <meta property="og:image:width" content="457">
  <meta property="og:image:height" content="480">
  <meta property="og:site_name" content="aniwatch">
  <meta property="og:type" content="website">
  <meta name="twitter:card" content="summary_large_image">
  <meta name="twitter:site" content="@aniwatch-api">
  <meta name="twitter:title" content="Aniwatch API">
  <meta name="twitter:description"
    content="Node Express API for obtaining anime information from aniwatch.to (formerly zoro.to) written in TypeScript, made with Cheerio & Axios">
  <meta name="twitter:image:src"
    content="https://raw.githubusercontent.com/ghoshRitesh12/aniwatch-api/main/public/img/img1.gif">
  <meta name="keywords" content="aniwatch api scraper anime zoro node express typescript">
  <meta property="og:title" content="Aniwatch API">
  <meta name="description"
    content="Node Express API for obtaining anime information from aniwatch.to (formerly zoro.to) written in TypeScript, made with Cheerio & Axios">
  <meta property="og:description"
    content="Node Express API for obtaining anime information from aniwatch.to (formerly zoro.to) written in TypeScript, made with Cheerio & Axios">
  <link rel="shortcut icon"
    href="https://raw.githubusercontent.com/ghoshRitesh12/aniwatch-api/main/public/img/img1.gif">

  <style>
    html {
      color-scheme: dark;
    }

    body {
      display: flex;
      align-items: center;
      justify-content: start;
      flex-direction: column;
      font-family: sans-serif;
      background: #18161A;
      padding: 10rem 1rem 0 1rem;
    }

    h2 {
      background-clip: text;
      background: linear-gradient(to left, #ff5a7e 40%, #00aeff);
      text-align: center;
      font-weight: 600;
      color: transparent;
      -webkit-background-clip: text;
      -webkit-text-fill-color: transparent;
    }

    a {
      color: #00AEDD;
    }

    img {
      object-fit: cover;
      max-width: 100%;
      font-size: .9rem;
    }

    .img-wrapper {
      aspect-ratio: 1/1;
      max-width: 12rem;
      border-radius: 1rem;
      overflow: hidden;
      width: fit-content;
      margin: 1rem auto 1rem auto;
    }
  </style>
</head>


<body>
  <h2>
    Welcome to the unofficial
    <a href="https://hianime.to" style="text-decoration: underline;">hianime.to (formerly aniwatch.to)</a>
    api
    <span style="-webkit-text-fill-color: white">⚔️</span>
  </h2>
  <div style="font-size: 1.2rem; text-align: center;">
    <div class="img-wrapper">
      <img draggable="false" src="https://raw.githubusercontent.com/ghoshRitesh12/aniwatch-api/main/public/img/img1.gif"
        alt="kawai anime girl" decoding="async" fetchpriority="high" />
    </div>
    <a href="https://github.com/ghoshritesh12/aniwatch-api#documentation" rel="noopener noreferer">
      Visit docs for more information
    </a>
  </div>
</body>


</html>
public/robots.txt
ADDED
@@ -0,0 +1,5 @@
# START aniwatch-api
Disallow:

User-agent: *
# END aniwatch-api
render.yaml
ADDED
@@ -0,0 +1,10 @@
services:
  - type: web
    name: aniwatch-api
    runtime: docker
    repo: https://github.com/ghoshRitesh12/aniwatch-api.git
    plan: free
    branch: main
    envVars:
      - key: PORT
        value: 4000
src/config/axios.ts
ADDED
@@ -0,0 +1,21 @@
import axios, { AxiosError, type AxiosRequestConfig } from "axios";
import {
  SRC_BASE_URL,
  ACCEPT_HEADER,
  USER_AGENT_HEADER,
  ACCEPT_ENCODING_HEADER,
} from "../utils/constants.js";

const clientConfig: AxiosRequestConfig = {
  timeout: 10000,
  baseURL: SRC_BASE_URL,
  headers: {
    Accept: ACCEPT_HEADER,
    "User-Agent": USER_AGENT_HEADER,
    "Accept-Encoding": ACCEPT_ENCODING_HEADER,
  },
};

const client = axios.create(clientConfig);

export { client, AxiosError };
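The `client` exported above bakes the source site's base URL, timeout, and request headers into a single Axios instance, so each parser only has to supply a path and its CSS selectors. A minimal sketch of how a parser might consume it follows; the `"/home"` path and the selector are illustrative assumptions, not code from this commit.

```ts
// Hypothetical parser sketch built on the shared client; the path and
// selector below are assumptions for illustration only.
import { load } from "cheerio";
import { client } from "../config/axios.js";

export async function getTrendingTitles(): Promise<string[]> {
  // baseURL, timeout and headers all come from clientConfig above
  const res = await client.get("/home");
  const $ = load(res.data);
  return $(".film-name")
    .map((_, el) => $(el).text().trim())
    .get();
}
```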
src/config/cors.ts
ADDED
@@ -0,0 +1,10 @@
import cors from "cors";

const corsConfig = cors({
  origin: "*",
  methods: "GET",
  credentials: true,
  optionsSuccessStatus: 200,
});

export default corsConfig;
src/config/errorHandler.ts
ADDED
@@ -0,0 +1,11 @@
import type { ErrorRequestHandler } from "express";

const errorHandler: ErrorRequestHandler = (error, req, res, next) => {
  const status = error?.status || 500;
  res.status(status).json({
    status,
    message: error?.message || "Something Went Wrong",
  });
};

export default errorHandler;
src/config/notFoundHandler.ts
ADDED
@@ -0,0 +1,8 @@
import type { RequestHandler } from "express";
import createHttpError from "http-errors";

const notFoundHandler: RequestHandler = (req, res, next) => {
  return next(createHttpError.NotFound());
};

export default notFoundHandler;
src/config/ratelimit.ts
ADDED
@@ -0,0 +1,17 @@
import { config } from "dotenv";
import createHttpError from "http-errors";
import { rateLimit } from "express-rate-limit";

config();

export const ratelimit = rateLimit({
  windowMs: Number(process.env.WINDOWMS) || 30 * 60 * 1000,
  limit: Number(process.env.MAX) || 50,
  legacyHeaders: true,
  standardHeaders: "draft-7",
  handler: function (_, __, next) {
    next(
      createHttpError.TooManyRequests("Too many API requests, try again later")
    );
  },
});
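The four config modules above are standalone middlewares; `src/server.ts` (listed in the diff but cut off by the 50-file limit) is where they would be mounted. The ordering below is a sketch under that assumption: routes in the middle, the 404 handler after them, and the error handler last so `next(err)` calls from the controllers and the rate limiter end up as `{ status, message }` JSON responses.

```ts
// Sketch only: src/server.ts is outside the visible part of this diff, so this
// wiring is an assumption based on how the config modules are written.
import express from "express";
import corsConfig from "./config/cors.js";
import { ratelimit } from "./config/ratelimit.js";
import errorHandler from "./config/errorHandler.js";
import notFoundHandler from "./config/notFoundHandler.js";

const app = express();

app.use(corsConfig); // GET-only CORS, open origin
app.use(ratelimit);  // WINDOWMS / MAX env vars control the limits
// ...API routes would be mounted here...
app.use(notFoundHandler); // unknown paths -> 404 via http-errors
app.use(errorHandler);    // JSON body for anything passed to next(err)

export default app;
```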
src/controllers/animeAboutInfo.controller.ts
ADDED
@@ -0,0 +1,31 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeAnimeAboutInfo } from "../parsers/index.js";
import { type AnimeAboutInfoQueryParams } from "../types/controllers/index.js";

// /anime/info?id=${anime-id}
const getAnimeAboutInfo: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeAnimeAboutInfo>>,
  unknown,
  AnimeAboutInfoQueryParams
> = async (req, res, next) => {
  try {
    const animeId = req.query.id
      ? decodeURIComponent(req.query.id as string)
      : null;

    if (animeId === null) {
      throw createHttpError.BadRequest("Anime unique id required");
    }

    const data = await scrapeAnimeAboutInfo(animeId);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeAboutInfo;
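Every controller in this commit follows the same pattern: the four generic parameters of Express's `RequestHandler` type the path params, the response body (derived from the parser's return type via `Awaited<ReturnType<...>>`), the request body, and the query string. The stripped-down handler below only illustrates what each slot controls; every name in it is hypothetical and not part of the commit.

```ts
// Illustration of the RequestHandler<PathParams, ResBody, ReqBody, Query>
// slots used by the controllers; all names here are hypothetical.
import { type RequestHandler } from "express";

type PathParams = { animeId: string };  // typed req.params
type ResBody = { received: string };    // what res.json() must be given
type ReqBody = unknown;                 // body is unused on these GET routes
type QueryParams = { page?: string };   // typed req.query

const exampleHandler: RequestHandler<PathParams, ResBody, ReqBody, QueryParams> =
  (req, res) => {
    res
      .status(200)
      .json({ received: `${req.params.animeId}:${req.query.page ?? "1"}` });
  };
```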
src/controllers/animeCategory.controller.ts
ADDED
@@ -0,0 +1,39 @@
import createHttpError from "http-errors";
import type { RequestHandler } from "express";
import type { AnimeCategories } from "../types/anime.js";
import { scrapeAnimeCategory } from "../parsers/index.js";
import type {
  CategoryAnimePathParams,
  CategoryAnimeQueryParams,
} from "../types/controllers/index.js";

// /anime/:category?page=${page}
const getAnimeCategory: RequestHandler<
  CategoryAnimePathParams,
  Awaited<ReturnType<typeof scrapeAnimeCategory>>,
  unknown,
  CategoryAnimeQueryParams
> = async (req, res, next) => {
  try {
    const category = req.params.category
      ? decodeURIComponent(req.params.category)
      : null;

    const page: number = req.query.page
      ? Number(decodeURIComponent(req.query?.page as string))
      : 1;

    if (category === null) {
      throw createHttpError.BadRequest("category required");
    }

    const data = await scrapeAnimeCategory(category as AnimeCategories, page);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeCategory;
src/controllers/animeEpisodeSrcs.controller.ts
ADDED
@@ -0,0 +1,75 @@
import axios from "axios";
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { type CheerioAPI, load } from "cheerio";
import { scrapeAnimeEpisodeSources } from "../parsers/index.js";
import { USER_AGENT_HEADER, SRC_BASE_URL } from "../utils/constants.js";
import { type AnimeServers, Servers } from "../types/anime.js";
import { type AnimeEpisodeSrcsQueryParams } from "../types/controllers/index.js";

type AnilistID = number | null;
type MalID = number | null;

// /anime/episode-srcs?id=${episodeId}?server=${server}&category=${category (dub or sub)}
const getAnimeEpisodeSources: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeAnimeEpisodeSources & AnilistID>>,
  unknown,
  AnimeEpisodeSrcsQueryParams
> = async (req, res, next) => {
  try {
    const episodeId = req.query.id ? decodeURIComponent(req.query.id) : null;

    const server = (
      req.query.server
        ? decodeURIComponent(req.query.server)
        : Servers.VidStreaming
    ) as AnimeServers;

    const category = (
      req.query.category ? decodeURIComponent(req.query.category) : "sub"
    ) as "sub" | "dub";

    if (episodeId === null) {
      throw createHttpError.BadRequest("Anime episode id required");
    }

    let malID: MalID;
    let anilistID: AnilistID;
    const animeURL = new URL(episodeId?.split("?ep=")[0], SRC_BASE_URL)?.href;

    const [episodeSrcData, animeSrc] = await Promise.all([
      scrapeAnimeEpisodeSources(episodeId, server, category),
      axios.get(animeURL, {
        headers: {
          Referer: SRC_BASE_URL,
          "User-Agent": USER_AGENT_HEADER,
          "X-Requested-With": "XMLHttpRequest",
        },
      }),
    ]);

    const $: CheerioAPI = load(animeSrc?.data);

    try {
      anilistID = Number(
        JSON.parse($("body")?.find("#syncData")?.text())?.anilist_id
      );
      malID = Number(JSON.parse($("body")?.find("#syncData")?.text())?.mal_id);
    } catch (err) {
      anilistID = null;
      malID = null;
    }

    res.status(200).json({
      ...episodeSrcData,
      anilistID,
      malID,
    });
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeEpisodeSources;
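The controller above pulls AniList and MAL ids out of a `#syncData` element on the anime page, parsing the element's text as JSON twice, once per id. The helper below sketches the same lookup with a single parse. The payload shape (`anilist_id` and `mal_id` fields) is inferred from what the controller reads and is an assumption, not verified against the site.

```ts
// Sketch: one JSON.parse for both ids; field names taken from the controller,
// the exact #syncData payload is assumed.
import { load } from "cheerio";

export function readSyncIds(pageHtml: string): {
  anilistID: number | null;
  malID: number | null;
} {
  try {
    const $ = load(pageHtml);
    const sync = JSON.parse($("#syncData").text());
    return {
      anilistID: Number(sync?.anilist_id) || null,
      malID: Number(sync?.mal_id) || null,
    };
  } catch {
    return { anilistID: null, malID: null };
  }
}
```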
src/controllers/animeEpisodes.controller.ts
ADDED
@@ -0,0 +1,31 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeAnimeEpisodes } from "../parsers/index.js";
import { type AnimeEpisodePathParams } from "../types/controllers/index.js";

// /anime/episodes/${anime-id}
const getAnimeEpisodes: RequestHandler<
  AnimeEpisodePathParams,
  Awaited<ReturnType<typeof scrapeAnimeEpisodes>>,
  unknown,
  unknown
> = async (req, res, next) => {
  try {
    const animeId = req.params.animeId
      ? decodeURIComponent(req.params.animeId)
      : null;

    if (animeId === null) {
      throw createHttpError.BadRequest("Anime Id required");
    }

    const data = await scrapeAnimeEpisodes(animeId);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeEpisodes;
src/controllers/animeGenre.controller.ts
ADDED
@@ -0,0 +1,37 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeGenreAnime } from "../parsers/index.js";
import type {
  GenreAnimePathParams,
  GenreAnimeQueryParams,
} from "../types/controllers/index.js";

// /anime/genre/${name}?page=${page}
const getGenreAnime: RequestHandler<
  GenreAnimePathParams,
  Awaited<ReturnType<typeof scrapeGenreAnime>>,
  unknown,
  GenreAnimeQueryParams
> = async (req, res, next) => {
  try {
    const name: string | null = req.params.name
      ? decodeURIComponent(req.params.name as string)
      : null;

    const page: number = req.query.page
      ? Number(decodeURIComponent(req.query?.page as string))
      : 1;

    if (name === null) {
      throw createHttpError.BadRequest("Anime genre required");
    }

    const data = await scrapeGenreAnime(name, page);
    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getGenreAnime;
src/controllers/animeProducer.controller.ts
ADDED
@@ -0,0 +1,37 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeProducerAnimes } from "../parsers/index.js";
import type {
  AnimeProducerPathParams,
  AnimeProducerQueryParams,
} from "../types/controllers/index.js";

// /anime/producer/${name}?page=${page}
const getProducerAnimes: RequestHandler<
  AnimeProducerPathParams,
  Awaited<ReturnType<typeof scrapeProducerAnimes>>,
  unknown,
  AnimeProducerQueryParams
> = async (req, res, next) => {
  try {
    const name: string | null = req.params.name
      ? decodeURIComponent(req.params.name as string)
      : null;

    const page: number = req.query.page
      ? Number(decodeURIComponent(req.query?.page as string))
      : 1;

    if (name === null) {
      throw createHttpError.BadRequest("Anime producer name required");
    }

    const data = await scrapeProducerAnimes(name, page);
    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getProducerAnimes;
src/controllers/animeSearch.controller.ts
ADDED
@@ -0,0 +1,57 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeAnimeSearch } from "../parsers/index.js";
import type {
  SearchFilters,
  AnimeSearchQueryParams,
} from "../types/controllers/index.js";

const searchFilters: Record<string, boolean> = {
  filter: true,
  type: true,
  status: true,
  rated: true,
  score: true,
  season: true,
  language: true,
  start_date: true,
  end_date: true,
  sort: true,
  genres: true,
} as const;

// /anime/search?q=${query}&page=${page}
const getAnimeSearch: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeAnimeSearch>>,
  unknown,
  AnimeSearchQueryParams
> = async (req, res, next) => {
  try {
    let { q: query, page, ...filters } = req.query;

    query = query ? decodeURIComponent(query) : undefined;
    const pageNo = page ? Number(decodeURIComponent(page as string)) : 1;

    if (query === undefined) {
      throw createHttpError.BadRequest("Search keyword required");
    }

    const parsedFilters: SearchFilters = {};
    for (const key in filters) {
      if (searchFilters[key]) {
        parsedFilters[key as keyof SearchFilters] =
          filters[key as keyof SearchFilters];
      }
    }

    const data = await scrapeAnimeSearch(query, pageNo, parsedFilters);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeSearch;
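The `searchFilters` map above acts as a whitelist: after `q` and `page` are destructured away, any remaining query parameter survives into `parsedFilters` only if its key is flagged in the map. The self-contained snippet below just traces that loop with made-up values.

```ts
// Self-contained trace of the whitelist loop with made-up values.
const allowed: Record<string, boolean> = { type: true, genres: true };
const incoming: Record<string, string> = {
  type: "movie",
  genres: "action,shounen",
  utm_source: "twitter", // not an allowed filter key
};
const parsed: Record<string, string> = {};
for (const key in incoming) {
  if (allowed[key]) parsed[key] = incoming[key];
}
console.log(parsed); // { type: "movie", genres: "action,shounen" }
```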
src/controllers/animeSearchSuggestion.controller.ts
ADDED
@@ -0,0 +1,31 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeAnimeSearchSuggestion } from "../parsers/index.js";
import { type AnimeSearchSuggestQueryParams } from "../types/controllers/index.js";

// /anime/search/suggest?q=${query}
const getAnimeSearchSuggestion: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeAnimeSearchSuggestion>>,
  unknown,
  AnimeSearchSuggestQueryParams
> = async (req, res, next) => {
  try {
    const query: string | null = req.query.q
      ? decodeURIComponent(req.query.q as string)
      : null;

    if (query === null) {
      throw createHttpError.BadRequest("Search keyword required");
    }

    const data = await scrapeAnimeSearchSuggestion(query);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getAnimeSearchSuggestion;
src/controllers/episodeServers.controller.ts
ADDED
@@ -0,0 +1,30 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeEpisodeServers } from "../parsers/index.js";
import { type EpisodeServersQueryParams } from "../types/controllers/index.js";

// /anime/servers?episodeId=${id}
const getEpisodeServers: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeEpisodeServers>>,
  unknown,
  EpisodeServersQueryParams
> = async (req, res, next) => {
  try {
    const episodeId = req.query.episodeId
      ? decodeURIComponent(req.query?.episodeId as string)
      : null;

    if (episodeId === null) {
      throw createHttpError.BadRequest("Episode id required");
    }

    const data = await scrapeEpisodeServers(episodeId);
    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getEpisodeServers;
src/controllers/estimatedSchedule.controller.ts
ADDED
@@ -0,0 +1,36 @@
import createHttpError from "http-errors";
import { type RequestHandler } from "express";
import { scrapeEstimatedSchedule } from "../parsers/index.js";
import { type EstimatedScheduleQueryParams } from "../types/controllers/index.js";

// /anime/schedule?date=${date}
const getEstimatedSchedule: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeEstimatedSchedule>>,
  unknown,
  EstimatedScheduleQueryParams
> = async (req, res, next) => {
  try {
    const dateQuery = req.query.date
      ? decodeURIComponent(req.query.date as string)
      : null;

    if (dateQuery === null) {
      throw createHttpError.BadRequest("Date payload required");
    }
    if (!/^\d{4}-\d{2}-\d{2}$/.test(dateQuery)) {
      throw createHttpError.BadRequest(
        "Invalid date payload format. Months and days must have 2 digits"
      );
    }

    const data = await scrapeEstimatedSchedule(dateQuery);

    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getEstimatedSchedule;
src/controllers/homePage.controller.ts
ADDED
@@ -0,0 +1,18 @@
import { type RequestHandler } from "express";
import { scrapeHomePage } from "../parsers/index.js";

// /anime/home
const getHomePageInfo: RequestHandler<
  unknown,
  Awaited<ReturnType<typeof scrapeHomePage>>
> = async (req, res, next) => {
  try {
    const data = await scrapeHomePage();
    res.status(200).json(data);
  } catch (err: any) {
    console.error(err);
    next(err);
  }
};

export default getHomePageInfo;
src/controllers/index.ts
ADDED
@@ -0,0 +1,25 @@
import getGenreAnime from "./animeGenre.controller.js";
import getHomePageInfo from "./homePage.controller.js";
import getAnimeSearch from "./animeSearch.controller.js";
import getAnimeEpisodes from "./animeEpisodes.controller.js";
import getAnimeCategory from "./animeCategory.controller.js";
import getProducerAnimes from "./animeProducer.controller.js";
import getEpisodeServers from "./episodeServers.controller.js";
import getAnimeAboutInfo from "./animeAboutInfo.controller.js";
import getEstimatedSchedule from "./estimatedSchedule.controller.js";
import getAnimeEpisodeSources from "./animeEpisodeSrcs.controller.js";
import getAnimeSearchSuggestion from "./animeSearchSuggestion.controller.js";

export {
  getGenreAnime,
  getAnimeSearch,
  getHomePageInfo,
  getAnimeEpisodes,
  getAnimeCategory,
  getEpisodeServers,
  getProducerAnimes,
  getAnimeAboutInfo,
  getEstimatedSchedule,
  getAnimeEpisodeSources,
  getAnimeSearchSuggestion,
};
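This barrel file is what `src/routes/index.ts` (present in the file list above but cut off by the 50-file limit) would import from. The route comment at the top of each controller suggests a mapping like the sketch below; treat the paths and router layout as assumptions, not the actual routes file.

```ts
// Assumed shape of src/routes/index.ts, reconstructed from the controllers'
// route comments; the real file is not visible in this diff view.
import { Router } from "express";
import {
  getHomePageInfo,
  getAnimeSearch,
  getAnimeAboutInfo,
  getAnimeEpisodes,
  getEpisodeServers,
  getEstimatedSchedule,
} from "../controllers/index.js";

const router = Router();

router.get("/home", getHomePageInfo);                // /anime/home
router.get("/info", getAnimeAboutInfo);              // /anime/info?id=${anime-id}
router.get("/search", getAnimeSearch);               // /anime/search?q=${query}&page=${page}
router.get("/episodes/:animeId", getAnimeEpisodes);  // /anime/episodes/${anime-id}
router.get("/servers", getEpisodeServers);           // /anime/servers?episodeId=${id}
router.get("/schedule", getEstimatedSchedule);       // /anime/schedule?date=${date}

export default router;
```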
src/extractors/index.ts
ADDED
@@ -0,0 +1,6 @@
import StreamSB from "./streamsb.js";
import StreamTape from "./streamtape.js";
import RapidCloud from "./rapidcloud.js";
import MegaCloud from "./megacloud.js";

export { StreamSB, StreamTape, RapidCloud, MegaCloud };
src/extractors/megacloud.ts
ADDED
@@ -0,0 +1,245 @@
import axios from "axios";
import crypto from "crypto";
import createHttpError from "http-errors";

// https://megacloud.tv/embed-2/e-1/dBqCr5BcOhnD?k=1

const megacloud = {
  script: "https://megacloud.tv/js/player/a/prod/e1-player.min.js?v=",
  sources: "https://megacloud.tv/embed-2/ajax/e-1/getSources?id=",
} as const;

type track = {
  file: string;
  kind: string;
  label?: string;
  default?: boolean;
};

type intro_outro = {
  start: number;
  end: number;
};

type unencryptedSrc = {
  file: string;
  type: string;
};

type extractedSrc = {
  sources: string | unencryptedSrc[];
  tracks: track[];
  encrypted: boolean;
  intro: intro_outro;
  outro: intro_outro;
  server: number;
};

interface ExtractedData
  extends Pick<extractedSrc, "intro" | "outro" | "tracks"> {
  sources: { url: string; type: string }[];
}

class MegaCloud {
  private serverName = "megacloud";

  async extract(videoUrl: URL) {
    try {
      const extractedData: ExtractedData = {
        tracks: [],
        intro: {
          start: 0,
          end: 0,
        },
        outro: {
          start: 0,
          end: 0,
        },
        sources: [],
      };

      const videoId = videoUrl?.href?.split("/")?.pop()?.split("?")[0];
      const { data: srcsData } = await axios.get<extractedSrc>(
        megacloud.sources.concat(videoId || ""),
        {
          headers: {
            Accept: "*/*",
            "X-Requested-With": "XMLHttpRequest",
            "User-Agent":
              "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36",
            Referer: videoUrl.href,
          },
        }
      );
      if (!srcsData) {
        throw createHttpError.NotFound("Url may have an invalid video id");
      }

      // console.log(JSON.stringify(srcsData, null, 2));

      const encryptedString = srcsData.sources;
      if (srcsData.encrypted && Array.isArray(encryptedString)) {
        extractedData.intro = srcsData.intro;
        extractedData.outro = srcsData.outro;
        extractedData.tracks = srcsData.tracks;
        extractedData.sources = encryptedString.map((s) => ({
          url: s.file,
          type: s.type,
        }));

        return extractedData;
      }

      let text: string;
      const { data } = await axios.get(
        megacloud.script.concat(Date.now().toString())
      );

      text = data;
      if (!text) {
        throw createHttpError.InternalServerError(
          "Couldn't fetch script to decrypt resource"
        );
      }

      const vars = this.extractVariables(text, "MEGACLOUD");
      const { secret, encryptedSource } = this.getSecret(
        encryptedString as string,
        vars
      );
      const decrypted = this.decrypt(encryptedSource, secret);
      try {
        const sources = JSON.parse(decrypted);
        extractedData.intro = srcsData.intro;
        extractedData.outro = srcsData.outro;
        extractedData.tracks = srcsData.tracks;
        extractedData.sources = sources.map((s: any) => ({
          url: s.file,
          type: s.type,
        }));

        return extractedData;
      } catch (error) {
        throw createHttpError.InternalServerError("Failed to decrypt resource");
      }
    } catch (err) {
      // console.log(err);
      throw err;
    }
  }

  extractVariables(text: string, sourceName: string) {
    // extract needed variables
    let allvars;
    if (sourceName !== "MEGACLOUD") {
      allvars =
        text
          .match(
            /const (?:\w{1,2}=(?:'.{0,50}?'|\w{1,2}\(.{0,20}?\)).{0,20}?,){7}.+?;/gm
          )
          ?.at(-1) ?? "";
    } else {
      allvars =
        text
          .match(/const \w{1,2}=new URLSearchParams.+?;(?=function)/gm)
          ?.at(-1) ?? "";
    }
    // and convert their values into an array of numbers
    const vars = allvars
      .slice(0, -1)
      .split("=")
      .slice(1)
      .map((pair) => Number(pair.split(",").at(0)))
      .filter((num) => num === 0 || num);

    return vars;
  }

  getSecret(encryptedString: string, values: number[]) {
    let secret = "",
      encryptedSource = encryptedString,
      totalInc = 0;

    for (let i = 0; i < values[0]!; i++) {
      let start, inc;
      switch (i) {
        case 0:
          (start = values[2]), (inc = values[1]);
          break;
        case 1:
          (start = values[4]), (inc = values[3]);
          break;
        case 2:
          (start = values[6]), (inc = values[5]);
          break;
        case 3:
          (start = values[8]), (inc = values[7]);
          break;
        case 4:
          (start = values[10]), (inc = values[9]);
          break;
        case 5:
          (start = values[12]), (inc = values[11]);
          break;
        case 6:
          (start = values[14]), (inc = values[13]);
          break;
        case 7:
          (start = values[16]), (inc = values[15]);
          break;
        case 8:
          (start = values[18]), (inc = values[17]);
      }
      const from = start! + totalInc,
        to = from + inc!;
      (secret += encryptedString.slice(from, to)),
        (encryptedSource = encryptedSource.replace(
          encryptedString.substring(from, to),
          ""
        )),
        (totalInc += inc!);
    }

    return { secret, encryptedSource };
  }

  decrypt(encrypted: string, keyOrSecret: string, maybe_iv?: string) {
    let key;
    let iv;
    let contents;
    if (maybe_iv) {
      key = keyOrSecret;
      iv = maybe_iv;
      contents = encrypted;
    } else {
      // copied from 'https://github.com/brix/crypto-js/issues/468'
      const cypher = Buffer.from(encrypted, "base64");
      const salt = cypher.subarray(8, 16);
      const password = Buffer.concat([
        Buffer.from(keyOrSecret, "binary"),
        salt,
      ]);
      const md5Hashes = [];
      let digest = password;
      for (let i = 0; i < 3; i++) {
        md5Hashes[i] = crypto.createHash("md5").update(digest).digest();
        digest = Buffer.concat([md5Hashes[i], password]);
      }
      key = Buffer.concat([md5Hashes[0], md5Hashes[1]]);
      iv = md5Hashes[2];
      contents = cypher.subarray(16);
    }

    const decipher = crypto.createDecipheriv("aes-256-cbc", key, iv);
    const decrypted =
      decipher.update(
        contents as any,
        typeof contents === "string" ? "base64" : undefined,
        "utf8"
      ) + decipher.final();

    return decrypted;
  }
}

export default MegaCloud;
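When the fetched sources are still encrypted, `decrypt()` above treats the base64 payload as OpenSSL-style output: a `Salted__` prefix, an 8-byte salt, then the AES-256-CBC ciphertext, with the key and IV derived from the secret by chained MD5 (the classic `EVP_BytesToKey` scheme, per the crypto-js issue linked in the code). The snippet below re-creates such a payload locally so the derivation can be seen end to end; it is a demonstration of the scheme, not code from this commit.

```ts
// Demonstration of the OpenSSL "Salted__" layout that decrypt() expects:
// build a payload with the same chained-MD5 key derivation, which the
// MegaCloud.decrypt() fallback branch can then open.
import crypto from "crypto";

function opensslStyleEncrypt(plain: string, secret: string): string {
  const salt = crypto.randomBytes(8);
  const password = Buffer.concat([Buffer.from(secret, "binary"), salt]);
  const hashes: Buffer[] = [];
  let digest = password;
  for (let i = 0; i < 3; i++) {
    hashes[i] = crypto.createHash("md5").update(digest).digest();
    digest = Buffer.concat([hashes[i], password]);
  }
  const key = Buffer.concat([hashes[0], hashes[1]]); // 32-byte AES key
  const iv = hashes[2];                              // 16-byte IV
  const cipher = crypto.createCipheriv("aes-256-cbc", key, iv);
  const body = Buffer.concat([cipher.update(plain, "utf8"), cipher.final()]);
  return Buffer.concat([Buffer.from("Salted__"), salt, body]).toString("base64");
}

// Round trip through the extractor's decrypt():
// new MegaCloud().decrypt(opensslStyleEncrypt('[{"file":"x.m3u8"}]', "secret"), "secret")
// returns the original JSON string.
```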
src/extractors/rapidcloud.ts
ADDED
@@ -0,0 +1,166 @@
1 |
+
import axios from "axios";
|
2 |
+
import CryptoJS from "crypto-js";
|
3 |
+
import { substringAfter, substringBefore } from "../utils/index.js";
|
4 |
+
import type { Video, Subtitle, Intro } from "../types/extractor.js";
|
5 |
+
|
6 |
+
type extractReturn = {
|
7 |
+
sources: Video[];
|
8 |
+
subtitles: Subtitle[];
|
9 |
+
};
|
10 |
+
|
11 |
+
// https://megacloud.tv/embed-2/e-1/IxJ7GjGVCyml?k=1
|
12 |
+
class RapidCloud {
|
13 |
+
private serverName = "RapidCloud";
|
14 |
+
private sources: Video[] = [];
|
15 |
+
|
16 |
+
// https://rapid-cloud.co/embed-6/eVZPDXwVfrY3?vast=1
|
17 |
+
private readonly fallbackKey = "c1d17096f2ca11b7";
|
18 |
+
private readonly host = "https://rapid-cloud.co";
|
19 |
+
|
20 |
+
async extract(videoUrl: URL): Promise<extractReturn> {
|
21 |
+
const result: extractReturn & { intro?: Intro; outro?: Intro } = {
|
22 |
+
sources: [],
|
23 |
+
subtitles: [],
|
24 |
+
};
|
25 |
+
|
26 |
+
try {
|
27 |
+
const id = videoUrl.href.split("/").pop()?.split("?")[0];
|
28 |
+
const options = {
|
29 |
+
headers: {
|
30 |
+
"X-Requested-With": "XMLHttpRequest",
|
31 |
+
},
|
32 |
+
};
|
33 |
+
|
34 |
+
let res = null;
|
35 |
+
|
36 |
+
res = await axios.get(
|
37 |
+
`https://${videoUrl.hostname}/embed-2/ajax/e-1/getSources?id=${id}`,
|
38 |
+
options
|
39 |
+
);
|
40 |
+
|
41 |
+
let {
|
42 |
+
data: { sources, tracks, intro, outro, encrypted },
|
43 |
+
} = res;
|
44 |
+
|
45 |
+
let decryptKey = await (
|
46 |
+
await axios.get(
|
47 |
+
"https://raw.githubusercontent.com/cinemaxhq/keys/e1/key"
|
48 |
+
)
|
49 |
+
).data;
|
50 |
+
|
51 |
+
decryptKey = substringBefore(
|
52 |
+
substringAfter(decryptKey, '"blob-code blob-code-inner js-file-line">'),
|
53 |
+
"</td>"
|
54 |
+
);
|
55 |
+
|
56 |
+
if (!decryptKey) {
|
57 |
+
decryptKey = await (
|
58 |
+
await axios.get(
|
59 |
+
"https://raw.githubusercontent.com/cinemaxhq/keys/e1/key"
|
60 |
+
)
|
61 |
+
).data;
|
62 |
+
}
|
63 |
+
|
64 |
+
if (!decryptKey) decryptKey = this.fallbackKey;
|
65 |
+
|
66 |
+
try {
|
67 |
+
if (encrypted) {
|
68 |
+
const sourcesArray = sources.split("");
|
69 |
+
let extractedKey = "";
|
70 |
+
let currentIndex = 0;
|
71 |
+
|
72 |
+
for (const index of decryptKey) {
|
73 |
+
const start = index[0] + currentIndex;
|
74 |
+
const end = start + index[1];
|
75 |
+
|
76 |
+
for (let i = start; i < end; i++) {
|
77 |
+
extractedKey += res.data.sources[i];
|
78 |
+
sourcesArray[i] = "";
|
79 |
+
}
|
80 |
+
currentIndex += index[1];
|
81 |
+
}
|
82 |
+
|
83 |
+
decryptKey = extractedKey;
|
84 |
+
sources = sourcesArray.join("");
|
85 |
+
|
86 |
+
const decrypt = CryptoJS.AES.decrypt(sources, decryptKey);
|
87 |
+
sources = JSON.parse(decrypt.toString(CryptoJS.enc.Utf8));
|
88 |
+
}
|
89 |
+
} catch (err: any) {
|
90 |
+
console.log(err.message);
|
91 |
+
throw new Error("Cannot decrypt sources. Perhaps the key is invalid.");
|
92 |
+
}
|
93 |
+
|
94 |
+
this.sources = sources?.map((s: any) => ({
|
95 |
+
url: s.file,
|
96 |
+
isM3U8: s.file.includes(".m3u8"),
|
97 |
+
}));
|
98 |
+
|
99 |
+
result.sources.push(...this.sources);
|
100 |
+
|
101 |
+
if (videoUrl.href.includes(new URL(this.host).host)) {
|
102 |
+
result.sources = [];
|
103 |
+
this.sources = [];
|
104 |
+
|
105 |
+
for (const source of sources) {
|
106 |
+
const { data } = await axios.get(source.file, options);
|
107 |
+
const m3u8data = data
|
108 |
+
.split("\n")
|
109 |
+
.filter(
|
110 |
+
(line: string) =>
|
111 |
+
line.includes(".m3u8") && line.includes("RESOLUTION=")
|
112 |
+
);
|
113 |
+
|
114 |
+
const secondHalf = m3u8data.map((line: string) =>
|
115 |
+
line.match(/RESOLUTION=.*,(C)|URI=.*/g)?.map((s) => s.split("=")[1])
|
116 |
+
);
|
117 |
+
|
118 |
+
const TdArray = secondHalf.map((s: string[]) => {
|
119 |
+
const f1 = s[0].split(",C")[0];
|
120 |
+
const f2 = s[1].replace(/"/g, "");
|
121 |
+
|
122 |
+
return [f1, f2];
|
123 |
+
});
|
124 |
+
|
125 |
+
for (const [f1, f2] of TdArray) {
|
126 |
+
this.sources.push({
|
127 |
+
url: `${source.file?.split("master.m3u8")[0]}${f2.replace(
|
128 |
+
"iframes",
|
129 |
+
"index"
|
130 |
+
)}`,
|
131 |
+
quality: f1.split("x")[1] + "p",
|
132 |
+
isM3U8: f2.includes(".m3u8"),
|
133 |
+
});
|
134 |
+
}
|
135 |
+
result.sources.push(...this.sources);
|
136 |
+
}
|
137 |
+
}
|
138 |
+
|
139 |
+
result.intro =
|
140 |
+
intro?.end > 1 ? { start: intro.start, end: intro.end } : undefined;
|
141 |
+
result.outro =
|
142 |
+
outro?.end > 1 ? { start: outro.start, end: outro.end } : undefined;
|
143 |
+
|
144 |
+
result.sources.push({
|
145 |
+
url: sources[0].file,
|
146 |
+
isM3U8: sources[0].file.includes(".m3u8"),
|
147 |
+
quality: "auto",
|
148 |
+
});
|
149 |
+
|
150 |
+
result.subtitles = tracks
|
151 |
+
.map((s: any) =>
|
152 |
+
s.file
|
153 |
+
? { url: s.file, lang: s.label ? s.label : "Thumbnails" }
|
154 |
+
: null
|
155 |
+
)
|
156 |
+
.filter((s: any) => s);
|
157 |
+
|
158 |
+
return result;
|
159 |
+
} catch (err: any) {
|
160 |
+
console.log(err.message);
|
161 |
+
throw err;
|
162 |
+
}
|
163 |
+
}
|
164 |
+
}
|
165 |
+
|
166 |
+
export default RapidCloud;
|
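The encrypted branch above assumes the key fetched from the keys repo parses to an array of `[start, length]` index pairs; each pair marks a slice of the encrypted `sources` string that belongs to the real AES passphrase and has to be blanked out of the ciphertext before decrypting. A minimal standalone sketch of that reconstruction step (the function and variable names are illustrative, not part of this commit):

```ts
import CryptoJS from "crypto-js";

// Mirrors the loop in RapidCloud.extract: the key is a list of [start, length]
// pairs pointing at passphrase characters hidden inside the encrypted sources.
function decodeSources(encrypted: string, indexPairs: [number, number][]) {
  const chars = encrypted.split("");
  let passphrase = "";
  let offset = 0;

  for (const [start, length] of indexPairs) {
    const from = start + offset; // indices shift as earlier slices are accounted for
    const to = from + length;
    for (let i = from; i < to; i++) {
      passphrase += encrypted[i]; // collect a passphrase character
      chars[i] = "";              // and drop it from the ciphertext
    }
    offset += length;
  }

  const ciphertext = chars.join("");
  const plain = CryptoJS.AES.decrypt(ciphertext, passphrase).toString(CryptoJS.enc.Utf8);
  return JSON.parse(plain) as { file: string }[];
}
```

The hard-coded `fallbackKey` is only reached when both key fetches come back empty.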
src/extractors/streamsb.ts
ADDED
@@ -0,0 +1,83 @@
|
1 |
+
import axios from "axios";
|
2 |
+
import type { Video } from "../types/extractor.js";
|
3 |
+
import { USER_AGENT_HEADER } from "../utils/index.js";
|
4 |
+
|
5 |
+
class StreamSB {
|
6 |
+
private serverName = "streamSB";
|
7 |
+
private sources: Video[] = [];
|
8 |
+
|
9 |
+
private readonly host = "https://watchsb.com/sources50";
|
10 |
+
private readonly host2 = "https://streamsss.net/sources16";
|
11 |
+
|
12 |
+
private PAYLOAD(hex: string): string {
|
13 |
+
// `5363587530696d33443675687c7c${hex}7c7c433569475830474c497a65767c7c73747265616d7362`;
|
14 |
+
return `566d337678566f743674494a7c7c${hex}7c7c346b6767586d6934774855537c7c73747265616d7362/6565417268755339773461447c7c346133383438333436313335376136323337373433383634376337633465366534393338373136643732373736343735373237613763376334363733353737303533366236333463353333363534366137633763373337343732363536313664373336327c7c6b586c3163614468645a47617c7c73747265616d7362`;
|
15 |
+
}
|
16 |
+
|
17 |
+
async extract(videoUrl: URL, isAlt: boolean = false): Promise<Video[]> {
|
18 |
+
let headers: Record<string, string> = {
|
19 |
+
watchsb: "sbstream",
|
20 |
+
Referer: videoUrl.href,
|
21 |
+
"User-Agent": USER_AGENT_HEADER,
|
22 |
+
};
|
23 |
+
let id = videoUrl.href.split("/e/").pop();
|
24 |
+
if (id?.includes("html")) {
|
25 |
+
id = id.split(".html")[0];
|
26 |
+
}
|
27 |
+
const bytes = new TextEncoder().encode(id);
|
28 |
+
|
29 |
+
const res = await axios
|
30 |
+
.get(
|
31 |
+
`${isAlt ? this.host2 : this.host}/${this.PAYLOAD(
|
32 |
+
Buffer.from(bytes).toString("hex")
|
33 |
+
)}`,
|
34 |
+
{ headers }
|
35 |
+
)
|
36 |
+
.catch(() => null);
|
37 |
+
|
38 |
+
if (!res?.data.stream_data) {
|
39 |
+
throw new Error("No source found. Try a different server");
|
40 |
+
}
|
41 |
+
|
42 |
+
headers = {
|
43 |
+
"User-Agent": USER_AGENT_HEADER,
|
44 |
+
Referer: videoUrl.href.split("e/")[0],
|
45 |
+
};
|
46 |
+
|
47 |
+
const m3u8_urls = await axios.get(res.data.stream_data.file, {
|
48 |
+
headers,
|
49 |
+
});
|
50 |
+
|
51 |
+
const videoList = m3u8_urls?.data?.split("#EXT-X-STREAM-INF:") ?? [];
|
52 |
+
|
53 |
+
for (const video of videoList) {
|
54 |
+
if (!video.includes("m3u8")) continue;
|
55 |
+
|
56 |
+
const url = video.split("\n")[1];
|
57 |
+
const quality = video.split("RESOLUTION=")[1].split(",")[0].split("x")[1];
|
58 |
+
|
59 |
+
this.sources.push({
|
60 |
+
url: url,
|
61 |
+
quality: `${quality}p`,
|
62 |
+
isM3U8: true,
|
63 |
+
});
|
64 |
+
}
|
65 |
+
|
66 |
+
this.sources.push({
|
67 |
+
url: res.data.stream_data.file,
|
68 |
+
quality: "auto",
|
69 |
+
isM3U8: res.data.stream_data.file.includes(".m3u8"),
|
70 |
+
});
|
71 |
+
|
72 |
+
return this.sources;
|
73 |
+
}
|
74 |
+
|
75 |
+
private addSources(source: any): void {
|
76 |
+
this.sources.push({
|
77 |
+
url: source.file,
|
78 |
+
isM3U8: source.file.includes(".m3u8"),
|
79 |
+
});
|
80 |
+
}
|
81 |
+
}
|
82 |
+
|
83 |
+
export default StreamSB;
|
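Driving the extractor looks roughly like this; the embed URL below is a placeholder, and the `isAlt` flag switches between the `watchsb.com/sources50` and `streamsss.net/sources16` hosts defined above:

```ts
import StreamSB from "./src/extractors/streamsb.js";

// Placeholder embed URL; the id after "/e/" is what gets hex-encoded into PAYLOAD.
const embed = new URL("https://watchsb.com/e/some-video-id.html");

const sources = await new StreamSB().extract(embed, /* isAlt */ false);
// The last entry is the "auto" master playlist; the rest are per-resolution m3u8s.
console.log(sources);
```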
src/extractors/streamtape.ts
ADDED
@@ -0,0 +1,37 @@
|
1 |
+
import axios from "axios";
|
2 |
+
import { load, type CheerioAPI } from "cheerio";
|
3 |
+
import type { Video } from "../types/extractor.js";
|
4 |
+
|
5 |
+
class StreamTape {
|
6 |
+
private serverName = "StreamTape";
|
7 |
+
private sources: Video[] = [];
|
8 |
+
|
9 |
+
async extract(videoUrl: URL): Promise<Video[]> {
|
10 |
+
try {
|
11 |
+
const { data } = await axios.get(videoUrl.href).catch(() => {
|
12 |
+
throw new Error("Video not found");
|
13 |
+
});
|
14 |
+
|
15 |
+
const $: CheerioAPI = load(data);
|
16 |
+
|
17 |
+
let [fh, sh] = $.html()
|
18 |
+
?.match(/robotlink'\).innerHTML = (.*)'/)![1]
|
19 |
+
.split("+ ('");
|
20 |
+
|
21 |
+
sh = sh.substring(3);
|
22 |
+
fh = fh.replace(/\'/g, "");
|
23 |
+
|
24 |
+
const url = `https:${fh}${sh}`;
|
25 |
+
|
26 |
+
this.sources.push({
|
27 |
+
url: url,
|
28 |
+
isM3U8: url.includes(".m3u8"),
|
29 |
+
});
|
30 |
+
|
31 |
+
return this.sources;
|
32 |
+
} catch (err) {
|
33 |
+
throw new Error((err as Error).message);
|
34 |
+
}
|
35 |
+
}
|
36 |
+
}
|
37 |
+
export default StreamTape;
|
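A usage sketch for completeness (placeholder URL); the extractor scrapes the embed page, reassembles the two `robotlink` halves into a direct `https:` link, and flags whether it is an m3u8:

```ts
import StreamTape from "./src/extractors/streamtape.js";

// Placeholder embed URL following the usual /e/<id> pattern.
const videos = await new StreamTape().extract(
  new URL("https://streamtape.com/e/some-video-id")
);
console.log(videos[0]?.url); // direct link assembled from the two robotlink halves
```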
src/parsers/animeAboutInfo.ts
ADDED
@@ -0,0 +1,184 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
extractAnimes,
|
4 |
+
ACCEPT_HEADER,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
ACCEPT_ENCODING_HEADER,
|
7 |
+
extractMostPopularAnimes,
|
8 |
+
} from "../utils/index.js";
|
9 |
+
import axios, { AxiosError } from "axios";
|
10 |
+
import createHttpError, { type HttpError } from "http-errors";
|
11 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
12 |
+
import { type ScrapedAnimeAboutInfo } from "../types/parsers/index.js";
|
13 |
+
|
14 |
+
// /anime/info?id=${anime-id}
|
15 |
+
async function scrapeAnimeAboutInfo(
|
16 |
+
id: string
|
17 |
+
): Promise<ScrapedAnimeAboutInfo | HttpError> {
|
18 |
+
const res: ScrapedAnimeAboutInfo = {
|
19 |
+
anime: {
|
20 |
+
info: {
|
21 |
+
id: null,
|
22 |
+
name: null,
|
23 |
+
poster: null,
|
24 |
+
description: null,
|
25 |
+
stats: {
|
26 |
+
rating: null,
|
27 |
+
quality: null,
|
28 |
+
episodes: {
|
29 |
+
sub: null,
|
30 |
+
dub: null,
|
31 |
+
},
|
32 |
+
type: null,
|
33 |
+
duration: null,
|
34 |
+
},
|
35 |
+
},
|
36 |
+
moreInfo: {},
|
37 |
+
},
|
38 |
+
seasons: [],
|
39 |
+
mostPopularAnimes: [],
|
40 |
+
relatedAnimes: [],
|
41 |
+
recommendedAnimes: [],
|
42 |
+
};
|
43 |
+
|
44 |
+
try {
|
45 |
+
const animeUrl: URL = new URL(id, SRC_BASE_URL);
|
46 |
+
const mainPage = await axios.get(animeUrl.href, {
|
47 |
+
headers: {
|
48 |
+
"User-Agent": USER_AGENT_HEADER,
|
49 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
50 |
+
Accept: ACCEPT_HEADER,
|
51 |
+
},
|
52 |
+
});
|
53 |
+
|
54 |
+
const $: CheerioAPI = load(mainPage.data);
|
55 |
+
|
56 |
+
const selector: SelectorType = "#ani_detail .container .anis-content";
|
57 |
+
|
58 |
+
res.anime.info.id =
|
59 |
+
$(selector)
|
60 |
+
?.find(".anisc-detail .film-buttons a.btn-play")
|
61 |
+
?.attr("href")
|
62 |
+
?.split("/")
|
63 |
+
?.pop() || null;
|
64 |
+
res.anime.info.name =
|
65 |
+
$(selector)
|
66 |
+
?.find(".anisc-detail .film-name.dynamic-name")
|
67 |
+
?.text()
|
68 |
+
?.trim() || null;
|
69 |
+
res.anime.info.description =
|
70 |
+
$(selector)
|
71 |
+
?.find(".anisc-detail .film-description .text")
|
72 |
+
.text()
|
73 |
+
?.split("[")
|
74 |
+
?.shift()
|
75 |
+
?.trim() || null;
|
76 |
+
res.anime.info.poster =
|
77 |
+
$(selector)?.find(".film-poster .film-poster-img")?.attr("src")?.trim() ||
|
78 |
+
null;
|
79 |
+
|
80 |
+
// stats
|
81 |
+
res.anime.info.stats.rating =
|
82 |
+
$(`${selector} .film-stats .tick .tick-pg`)?.text()?.trim() || null;
|
83 |
+
res.anime.info.stats.quality =
|
84 |
+
$(`${selector} .film-stats .tick .tick-quality`)?.text()?.trim() || null;
|
85 |
+
res.anime.info.stats.episodes = {
|
86 |
+
sub:
|
87 |
+
Number($(`${selector} .film-stats .tick .tick-sub`)?.text()?.trim()) ||
|
88 |
+
null,
|
89 |
+
dub:
|
90 |
+
Number($(`${selector} .film-stats .tick .tick-dub`)?.text()?.trim()) ||
|
91 |
+
null,
|
92 |
+
};
|
93 |
+
res.anime.info.stats.type =
|
94 |
+
$(`${selector} .film-stats .tick`)
|
95 |
+
?.text()
|
96 |
+
?.trim()
|
97 |
+
?.replace(/[\s\n]+/g, " ")
|
98 |
+
?.split(" ")
|
99 |
+
?.at(-2) || null;
|
100 |
+
res.anime.info.stats.duration =
|
101 |
+
$(`${selector} .film-stats .tick`)
|
102 |
+
?.text()
|
103 |
+
?.trim()
|
104 |
+
?.replace(/[\s\n]+/g, " ")
|
105 |
+
?.split(" ")
|
106 |
+
?.pop() || null;
|
107 |
+
|
108 |
+
// more information
|
109 |
+
$(`${selector} .anisc-info-wrap .anisc-info .item:not(.w-hide)`).each(
|
110 |
+
(i, el) => {
|
111 |
+
let key = $(el)
|
112 |
+
.find(".item-head")
|
113 |
+
.text()
|
114 |
+
.toLowerCase()
|
115 |
+
.replace(":", "")
|
116 |
+
.trim();
|
117 |
+
key = key.includes(" ") ? key.replace(" ", "") : key;
|
118 |
+
|
119 |
+
const value = [
|
120 |
+
...$(el)
|
121 |
+
.find("*:not(.item-head)")
|
122 |
+
.map((i, el) => $(el).text().trim()),
|
123 |
+
]
|
124 |
+
.map((i) => `${i}`)
|
125 |
+
.toString()
|
126 |
+
.trim();
|
127 |
+
|
128 |
+
if (key === "genres") {
|
129 |
+
res.anime.moreInfo[key] = value.split(",").map((i) => i.trim());
|
130 |
+
return;
|
131 |
+
}
|
132 |
+
if (key === "producers") {
|
133 |
+
res.anime.moreInfo[key] = value.split(",").map((i) => i.trim());
|
134 |
+
return;
|
135 |
+
}
|
136 |
+
res.anime.moreInfo[key] = value;
|
137 |
+
}
|
138 |
+
);
|
139 |
+
|
140 |
+
// more seasons
|
141 |
+
const seasonsSelector: SelectorType = "#main-content .os-list a.os-item";
|
142 |
+
$(seasonsSelector).each((i, el) => {
|
143 |
+
res.seasons.push({
|
144 |
+
id: $(el)?.attr("href")?.slice(1)?.trim() || null,
|
145 |
+
name: $(el)?.attr("title")?.trim() || null,
|
146 |
+
title: $(el)?.find(".title")?.text()?.trim(),
|
147 |
+
poster:
|
148 |
+
$(el)
|
149 |
+
?.find(".season-poster")
|
150 |
+
?.attr("style")
|
151 |
+
?.split(" ")
|
152 |
+
?.pop()
|
153 |
+
?.split("(")
|
154 |
+
?.pop()
|
155 |
+
?.split(")")[0] || null,
|
156 |
+
isCurrent: $(el).hasClass("active"),
|
157 |
+
});
|
158 |
+
});
|
159 |
+
|
160 |
+
const relatedAnimeSelector: SelectorType =
|
161 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-realtime:nth-of-type(1) .anif-block-ul ul li";
|
162 |
+
res.relatedAnimes = extractMostPopularAnimes($, relatedAnimeSelector);
|
163 |
+
|
164 |
+
const mostPopularSelector: SelectorType =
|
165 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-realtime:nth-of-type(2) .anif-block-ul ul li";
|
166 |
+
res.mostPopularAnimes = extractMostPopularAnimes($, mostPopularSelector);
|
167 |
+
|
168 |
+
const recommendedAnimeSelector: SelectorType =
|
169 |
+
"#main-content .block_area.block_area_category .tab-content .flw-item";
|
170 |
+
res.recommendedAnimes = extractAnimes($, recommendedAnimeSelector);
|
171 |
+
|
172 |
+
return res;
|
173 |
+
} catch (err: any) {
|
174 |
+
if (err instanceof AxiosError) {
|
175 |
+
throw createHttpError(
|
176 |
+
err?.response?.status || 500,
|
177 |
+
err?.response?.statusText || "Something went wrong"
|
178 |
+
);
|
179 |
+
}
|
180 |
+
throw createHttpError.InternalServerError(err?.message);
|
181 |
+
}
|
182 |
+
}
|
183 |
+
|
184 |
+
export default scrapeAnimeAboutInfo;
|
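Used directly (outside the Express controller), the parser takes the slug-style anime id from a detail-page URL; `steinsgate-3` below is the same illustrative id the route comments use:

```ts
import { scrapeAnimeAboutInfo } from "./src/parsers/index.js";

const info = await scrapeAnimeAboutInfo("steinsgate-3");

if ("anime" in info) {
  console.log(info.anime.info.name);       // title
  console.log(info.anime.moreInfo.genres); // string[] when the page lists genres
  console.log(info.seasons.length, "seasons,", info.relatedAnimes.length, "related");
}
```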
src/parsers/animeCategory.ts
ADDED
@@ -0,0 +1,118 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
extractAnimes,
|
4 |
+
ACCEPT_HEADER,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
extractTop10Animes,
|
7 |
+
ACCEPT_ENCODING_HEADER,
|
8 |
+
} from "../utils/index.js";
|
9 |
+
import axios, { AxiosError } from "axios";
|
10 |
+
import { type AnimeCategories } from "../types/anime.js";
|
11 |
+
import createHttpError, { type HttpError } from "http-errors";
|
12 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
13 |
+
import { type ScrapedAnimeCategory } from "../types/parsers/index.js";
|
14 |
+
|
15 |
+
// /anime/:category?page=${page}
|
16 |
+
async function scrapeAnimeCategory(
|
17 |
+
category: AnimeCategories,
|
18 |
+
page: number = 1
|
19 |
+
): Promise<ScrapedAnimeCategory | HttpError> {
|
20 |
+
const res: ScrapedAnimeCategory = {
|
21 |
+
animes: [],
|
22 |
+
genres: [],
|
23 |
+
top10Animes: {
|
24 |
+
today: [],
|
25 |
+
week: [],
|
26 |
+
month: [],
|
27 |
+
},
|
28 |
+
category,
|
29 |
+
currentPage: Number(page),
|
30 |
+
hasNextPage: false,
|
31 |
+
totalPages: 1,
|
32 |
+
};
|
33 |
+
|
34 |
+
try {
|
35 |
+
const scrapeUrl: URL = new URL(category, SRC_BASE_URL);
|
36 |
+
const mainPage = await axios.get(`${scrapeUrl}?page=${page}`, {
|
37 |
+
headers: {
|
38 |
+
"User-Agent": USER_AGENT_HEADER,
|
39 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
40 |
+
Accept: ACCEPT_HEADER,
|
41 |
+
},
|
42 |
+
});
|
43 |
+
|
44 |
+
const $: CheerioAPI = load(mainPage.data);
|
45 |
+
|
46 |
+
const selector: SelectorType =
|
47 |
+
"#main-content .tab-content .film_list-wrap .flw-item";
|
48 |
+
|
49 |
+
const categoryNameSelector: SelectorType =
|
50 |
+
"#main-content .block_area .block_area-header .cat-heading";
|
51 |
+
res.category = $(categoryNameSelector)?.text()?.trim() ?? category;
|
52 |
+
|
53 |
+
res.hasNextPage =
|
54 |
+
$(".pagination > li").length > 0
|
55 |
+
? $(".pagination li.active").length > 0
|
56 |
+
? $(".pagination > li").last().hasClass("active")
|
57 |
+
? false
|
58 |
+
: true
|
59 |
+
: false
|
60 |
+
: false;
|
61 |
+
|
62 |
+
res.totalPages =
|
63 |
+
Number(
|
64 |
+
$('.pagination > .page-item a[title="Last"]')
|
65 |
+
?.attr("href")
|
66 |
+
?.split("=")
|
67 |
+
.pop() ??
|
68 |
+
$('.pagination > .page-item a[title="Next"]')
|
69 |
+
?.attr("href")
|
70 |
+
?.split("=")
|
71 |
+
.pop() ??
|
72 |
+
$(".pagination > .page-item.active a")?.text()?.trim()
|
73 |
+
) || 1;
|
74 |
+
|
75 |
+
res.animes = extractAnimes($, selector);
|
76 |
+
|
77 |
+
if (res.animes.length === 0 && !res.hasNextPage) {
|
78 |
+
res.totalPages = 0;
|
79 |
+
}
|
80 |
+
|
81 |
+
const genreSelector: SelectorType =
|
82 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-genres .sb-genre-list li";
|
83 |
+
$(genreSelector).each((i, el) => {
|
84 |
+
res.genres.push(`${$(el).text().trim()}`);
|
85 |
+
});
|
86 |
+
|
87 |
+
const top10AnimeSelector: SelectorType =
|
88 |
+
'#main-sidebar .block_area-realtime [id^="top-viewed-"]';
|
89 |
+
|
90 |
+
$(top10AnimeSelector).each((i, el) => {
|
91 |
+
const period = $(el).attr("id")?.split("-")?.pop()?.trim();
|
92 |
+
|
93 |
+
if (period === "day") {
|
94 |
+
res.top10Animes.today = extractTop10Animes($, period);
|
95 |
+
return;
|
96 |
+
}
|
97 |
+
if (period === "week") {
|
98 |
+
res.top10Animes.week = extractTop10Animes($, period);
|
99 |
+
return;
|
100 |
+
}
|
101 |
+
if (period === "month") {
|
102 |
+
res.top10Animes.month = extractTop10Animes($, period);
|
103 |
+
}
|
104 |
+
});
|
105 |
+
|
106 |
+
return res;
|
107 |
+
} catch (err: any) {
|
108 |
+
if (err instanceof AxiosError) {
|
109 |
+
throw createHttpError(
|
110 |
+
err?.response?.status || 500,
|
111 |
+
err?.response?.statusText || "Something went wrong"
|
112 |
+
);
|
113 |
+
}
|
114 |
+
throw createHttpError.InternalServerError(err?.message);
|
115 |
+
}
|
116 |
+
}
|
117 |
+
|
118 |
+
export default scrapeAnimeCategory;
|
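The `hasNextPage`/`totalPages` block above is repeated verbatim in the genre, producer and search parsers. Read as plain logic: there is a next page only when a pagination list exists, an active item exists, and the active item is not the last one; the page total comes from the "Last" link, then the "Next" link, then the active page text. A readability-only sketch of that logic as helpers (these helpers do not exist in this commit):

```ts
import type { CheerioAPI } from "cheerio";

// Equivalent to the nested ternary used by the category/genre/producer/search parsers.
export function hasNextPage($: CheerioAPI): boolean {
  const items = $(".pagination > li");
  if (items.length === 0) return false;                      // no pagination rendered
  if ($(".pagination li.active").length === 0) return false; // no active page marker
  return !items.last().hasClass("active");                   // next page unless active is last
}

// Same fallback chain as above: "Last" link ?? "Next" link ?? active page text, else 1.
export function totalPages($: CheerioAPI): number {
  const last = $('.pagination > .page-item a[title="Last"]').attr("href");
  const next = $('.pagination > .page-item a[title="Next"]').attr("href");
  const active = $(".pagination > .page-item.active a").text().trim();
  return Number(last?.split("=").pop() ?? next?.split("=").pop() ?? active) || 1;
}
```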
src/parsers/animeEpisodeSrcs.ts
ADDED
@@ -0,0 +1,129 @@
|
1 |
+
import {
|
2 |
+
SRC_AJAX_URL,
|
3 |
+
SRC_BASE_URL,
|
4 |
+
retrieveServerId,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
} from "../utils/index.js";
|
7 |
+
import axios, { AxiosError } from "axios";
|
8 |
+
import { load, type CheerioAPI } from "cheerio";
|
9 |
+
import createHttpError, { type HttpError } from "http-errors";
|
10 |
+
import { type AnimeServers, Servers } from "../types/anime.js";
|
11 |
+
import {
|
12 |
+
RapidCloud,
|
13 |
+
StreamSB,
|
14 |
+
StreamTape,
|
15 |
+
MegaCloud,
|
16 |
+
} from "../extractors/index.js";
|
17 |
+
import { type ScrapedAnimeEpisodesSources } from "../types/parsers/index.js";
|
18 |
+
|
19 |
+
// vidstreaming -> 4
// vidstreaming -> 4
|
20 |
+
// rapidcloud -> 1
|
21 |
+
// streamsb -> 5
|
22 |
+
// streamtape -> 3
|
23 |
+
|
24 |
+
// /anime/episode-srcs?id=${episodeId}?server=${server}&category=${category (dub or sub)}
|
25 |
+
async function scrapeAnimeEpisodeSources(
|
26 |
+
episodeId: string,
|
27 |
+
server: AnimeServers = Servers.VidStreaming,
|
28 |
+
category: "sub" | "dub" = "sub"
|
29 |
+
): Promise<ScrapedAnimeEpisodesSources | HttpError> {
|
30 |
+
if (episodeId.startsWith("http")) {
|
31 |
+
const serverUrl = new URL(episodeId);
|
32 |
+
switch (server) {
|
33 |
+
case Servers.VidStreaming:
|
34 |
+
case Servers.VidCloud:
|
35 |
+
return {
|
36 |
+
...(await new MegaCloud().extract(serverUrl)),
|
37 |
+
};
|
38 |
+
case Servers.StreamSB:
|
39 |
+
return {
|
40 |
+
headers: {
|
41 |
+
Referer: serverUrl.href,
|
42 |
+
watchsb: "streamsb",
|
43 |
+
"User-Agent": USER_AGENT_HEADER,
|
44 |
+
},
|
45 |
+
sources: await new StreamSB().extract(serverUrl, true),
|
46 |
+
};
|
47 |
+
case Servers.StreamTape:
|
48 |
+
return {
|
49 |
+
headers: { Referer: serverUrl.href, "User-Agent": USER_AGENT_HEADER },
|
50 |
+
sources: await new StreamTape().extract(serverUrl),
|
51 |
+
};
|
52 |
+
default: // vidcloud
|
53 |
+
return {
|
54 |
+
headers: { Referer: serverUrl.href },
|
55 |
+
...(await new RapidCloud().extract(serverUrl)),
|
56 |
+
};
|
57 |
+
}
|
58 |
+
}
|
59 |
+
|
60 |
+
const epId = new URL(`/watch/${episodeId}`, SRC_BASE_URL).href;
|
61 |
+
console.log(epId);
|
62 |
+
|
63 |
+
try {
|
64 |
+
const resp = await axios.get(
|
65 |
+
`${SRC_AJAX_URL}/v2/episode/servers?episodeId=${epId.split("?ep=")[1]}`,
|
66 |
+
{
|
67 |
+
headers: {
|
68 |
+
Referer: epId,
|
69 |
+
"User-Agent": USER_AGENT_HEADER,
|
70 |
+
"X-Requested-With": "XMLHttpRequest",
|
71 |
+
},
|
72 |
+
}
|
73 |
+
);
|
74 |
+
|
75 |
+
const $: CheerioAPI = load(resp.data.html);
|
76 |
+
|
77 |
+
let serverId: string | null = null;
|
78 |
+
|
79 |
+
try {
|
80 |
+
console.log("THE SERVER: ", server);
|
81 |
+
|
82 |
+
switch (server) {
|
83 |
+
case Servers.VidCloud: {
|
84 |
+
serverId = retrieveServerId($, 1, category);
|
85 |
+
if (!serverId) throw new Error("RapidCloud not found");
|
86 |
+
break;
|
87 |
+
}
|
88 |
+
case Servers.VidStreaming: {
|
89 |
+
serverId = retrieveServerId($, 4, category);
|
90 |
+
console.log("SERVER_ID: ", serverId);
|
91 |
+
if (!serverId) throw new Error("VidStreaming not found");
|
92 |
+
break;
|
93 |
+
}
|
94 |
+
case Servers.StreamSB: {
|
95 |
+
serverId = retrieveServerId($, 5, category);
|
96 |
+
if (!serverId) throw new Error("StreamSB not found");
|
97 |
+
break;
|
98 |
+
}
|
99 |
+
case Servers.StreamTape: {
|
100 |
+
serverId = retrieveServerId($, 3, category);
|
101 |
+
if (!serverId) throw new Error("StreamTape not found");
|
102 |
+
break;
|
103 |
+
}
|
104 |
+
}
|
105 |
+
} catch (err) {
|
106 |
+
throw createHttpError.NotFound(
|
107 |
+
"Couldn't find server. Try another server"
|
108 |
+
);
|
109 |
+
}
|
110 |
+
|
111 |
+
const {
|
112 |
+
data: { link },
|
113 |
+
} = await axios.get(`${SRC_AJAX_URL}/v2/episode/sources?id=${serverId}`);
|
114 |
+
console.log("THE LINK: ", link);
|
115 |
+
|
116 |
+
return await scrapeAnimeEpisodeSources(link, server);
|
117 |
+
} catch (err: any) {
|
118 |
+
console.log(err);
|
119 |
+
if (err instanceof AxiosError) {
|
120 |
+
throw createHttpError(
|
121 |
+
err?.response?.status || 500,
|
122 |
+
err?.response?.statusText || "Something went wrong"
|
123 |
+
);
|
124 |
+
}
|
125 |
+
throw createHttpError.InternalServerError(err?.message);
|
126 |
+
}
|
127 |
+
}
|
128 |
+
|
129 |
+
export default scrapeAnimeEpisodeSources;
|
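Putting the flow together: the function maps the requested server to its numeric id in the servers markup, fetches the embed link from the AJAX sources endpoint, then recurses with that link so the `startsWith("http")` branch can hand off to the matching extractor. Illustrative usage, reusing the id from the route comments:

```ts
import { scrapeAnimeEpisodeSources } from "./src/parsers/index.js";
import { Servers } from "./src/types/anime.js";

const data = await scrapeAnimeEpisodeSources(
  "steinsgate-3?ep=230", // episodeId, same illustrative value as src/routes/index.ts
  Servers.VidStreaming,
  "sub"
);

if ("sources" in data) {
  // data.sources is [{ url, isM3U8, quality? }, ...]; MegaCloud/RapidCloud-backed
  // servers additionally carry subtitles and intro/outro timestamps.
  console.log(data.sources);
}
```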
src/parsers/animeEpisodes.ts
ADDED
@@ -0,0 +1,61 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
SRC_AJAX_URL,
|
4 |
+
ACCEPT_HEADER,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
ACCEPT_ENCODING_HEADER,
|
7 |
+
} from "../utils/index.js";
|
8 |
+
import axios, { AxiosError } from "axios";
|
9 |
+
import { load, type CheerioAPI } from "cheerio";
|
10 |
+
import createHttpError, { type HttpError } from "http-errors";
|
11 |
+
import { type ScrapedAnimeEpisodes } from "../types/parsers/index.js";
|
12 |
+
|
13 |
+
// /anime/episodes/${anime-id}
|
14 |
+
async function scrapeAnimeEpisodes(
|
15 |
+
animeId: string
|
16 |
+
): Promise<ScrapedAnimeEpisodes | HttpError> {
|
17 |
+
const res: ScrapedAnimeEpisodes = {
|
18 |
+
totalEpisodes: 0,
|
19 |
+
episodes: [],
|
20 |
+
};
|
21 |
+
|
22 |
+
try {
|
23 |
+
const episodesAjax = await axios.get(
|
24 |
+
`${SRC_AJAX_URL}/v2/episode/list/${animeId.split("-").pop()}`,
|
25 |
+
{
|
26 |
+
headers: {
|
27 |
+
Accept: ACCEPT_HEADER,
|
28 |
+
"User-Agent": USER_AGENT_HEADER,
|
29 |
+
"X-Requested-With": "XMLHttpRequest",
|
30 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
31 |
+
Referer: `${SRC_BASE_URL}/watch/${animeId}`,
|
32 |
+
},
|
33 |
+
}
|
34 |
+
);
|
35 |
+
|
36 |
+
const $: CheerioAPI = load(episodesAjax.data.html);
|
37 |
+
|
38 |
+
res.totalEpisodes = Number($(".detail-infor-content .ss-list a").length);
|
39 |
+
|
40 |
+
$(".detail-infor-content .ss-list a").each((i, el) => {
|
41 |
+
res.episodes.push({
|
42 |
+
title: $(el)?.attr("title")?.trim() || null,
|
43 |
+
episodeId: $(el)?.attr("href")?.split("/")?.pop() || null,
|
44 |
+
number: Number($(el).attr("data-number")),
|
45 |
+
isFiller: $(el).hasClass("ssl-item-filler"),
|
46 |
+
});
|
47 |
+
});
|
48 |
+
|
49 |
+
return res;
|
50 |
+
} catch (err: any) {
|
51 |
+
if (err instanceof AxiosError) {
|
52 |
+
throw createHttpError(
|
53 |
+
err?.response?.status || 500,
|
54 |
+
err?.response?.statusText || "Something went wrong"
|
55 |
+
);
|
56 |
+
}
|
57 |
+
throw createHttpError.InternalServerError(err?.message);
|
58 |
+
}
|
59 |
+
}
|
60 |
+
|
61 |
+
export default scrapeAnimeEpisodes;
|
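Usage sketch; note that only the numeric suffix of the anime id (`3` in `steinsgate-3`) is sent to the episode-list AJAX endpoint, while the full id is used for the Referer header:

```ts
import { scrapeAnimeEpisodes } from "./src/parsers/index.js";

const res = await scrapeAnimeEpisodes("steinsgate-3");

if ("episodes" in res) {
  console.log(res.totalEpisodes);
  // Each entry: { title, episodeId (e.g. "steinsgate-3?ep=230"), number, isFiller }
  console.log(res.episodes[0]);
}
```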
src/parsers/animeGenre.ts
ADDED
@@ -0,0 +1,105 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
ACCEPT_HEADER,
|
4 |
+
extractAnimes,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
ACCEPT_ENCODING_HEADER,
|
7 |
+
extractMostPopularAnimes,
|
8 |
+
} from "../utils/index.js";
|
9 |
+
import axios, { AxiosError } from "axios";
|
10 |
+
import createHttpError, { type HttpError } from "http-errors";
|
11 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
12 |
+
import type { ScrapedGenreAnime } from "../types/parsers/index.js";
|
13 |
+
|
14 |
+
// /anime/genre/${name}?page=${page}
|
15 |
+
async function scrapeGenreAnime(
|
16 |
+
genreName: string,
|
17 |
+
page: number = 1
|
18 |
+
): Promise<ScrapedGenreAnime | HttpError> {
|
19 |
+
const res: ScrapedGenreAnime = {
|
20 |
+
genreName,
|
21 |
+
animes: [],
|
22 |
+
genres: [],
|
23 |
+
topAiringAnimes: [],
|
24 |
+
totalPages: 1,
|
25 |
+
hasNextPage: false,
|
26 |
+
currentPage: Number(page),
|
27 |
+
};
|
28 |
+
|
29 |
+
// the source site (zoro) has a typo: the "martial-arts" genre slug is actually "marial-arts"
|
30 |
+
genreName = genreName === "martial-arts" ? "marial-arts" : genreName;
|
31 |
+
|
32 |
+
try {
|
33 |
+
const genreUrl: URL = new URL(
|
34 |
+
`/genre/${genreName}?page=${page}`,
|
35 |
+
SRC_BASE_URL
|
36 |
+
);
|
37 |
+
|
38 |
+
const mainPage = await axios.get(genreUrl.href, {
|
39 |
+
headers: {
|
40 |
+
"User-Agent": USER_AGENT_HEADER,
|
41 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
42 |
+
Accept: ACCEPT_HEADER,
|
43 |
+
},
|
44 |
+
});
|
45 |
+
|
46 |
+
const $: CheerioAPI = load(mainPage.data);
|
47 |
+
|
48 |
+
const selector: SelectorType =
|
49 |
+
"#main-content .tab-content .film_list-wrap .flw-item";
|
50 |
+
|
51 |
+
const genreNameSelector: SelectorType =
|
52 |
+
"#main-content .block_area .block_area-header .cat-heading";
|
53 |
+
res.genreName = $(genreNameSelector)?.text()?.trim() ?? genreName;
|
54 |
+
|
55 |
+
res.hasNextPage =
|
56 |
+
$(".pagination > li").length > 0
|
57 |
+
? $(".pagination li.active").length > 0
|
58 |
+
? $(".pagination > li").last().hasClass("active")
|
59 |
+
? false
|
60 |
+
: true
|
61 |
+
: false
|
62 |
+
: false;
|
63 |
+
|
64 |
+
res.totalPages =
|
65 |
+
Number(
|
66 |
+
$('.pagination > .page-item a[title="Last"]')
|
67 |
+
?.attr("href")
|
68 |
+
?.split("=")
|
69 |
+
.pop() ??
|
70 |
+
$('.pagination > .page-item a[title="Next"]')
|
71 |
+
?.attr("href")
|
72 |
+
?.split("=")
|
73 |
+
.pop() ??
|
74 |
+
$(".pagination > .page-item.active a")?.text()?.trim()
|
75 |
+
) || 1;
|
76 |
+
|
77 |
+
res.animes = extractAnimes($, selector);
|
78 |
+
|
79 |
+
if (res.animes.length === 0 && !res.hasNextPage) {
|
80 |
+
res.totalPages = 0;
|
81 |
+
}
|
82 |
+
|
83 |
+
const genreSelector: SelectorType =
|
84 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-genres .sb-genre-list li";
|
85 |
+
$(genreSelector).each((i, el) => {
|
86 |
+
res.genres.push(`${$(el).text().trim()}`);
|
87 |
+
});
|
88 |
+
|
89 |
+
const topAiringSelector: SelectorType =
|
90 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-realtime .anif-block-ul ul li";
|
91 |
+
res.topAiringAnimes = extractMostPopularAnimes($, topAiringSelector);
|
92 |
+
|
93 |
+
return res;
|
94 |
+
} catch (err: any) {
|
95 |
+
if (err instanceof AxiosError) {
|
96 |
+
throw createHttpError(
|
97 |
+
err?.response?.status || 500,
|
98 |
+
err?.response?.statusText || "Something went wrong"
|
99 |
+
);
|
100 |
+
}
|
101 |
+
throw createHttpError.InternalServerError(err?.message);
|
102 |
+
}
|
103 |
+
}
|
104 |
+
|
105 |
+
export default scrapeGenreAnime;
|
src/parsers/animeProducer.ts
ADDED
@@ -0,0 +1,120 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
ACCEPT_HEADER,
|
4 |
+
USER_AGENT_HEADER,
|
5 |
+
ACCEPT_ENCODING_HEADER,
|
6 |
+
extractMostPopularAnimes,
|
7 |
+
extractAnimes,
|
8 |
+
extractTop10Animes,
|
9 |
+
} from "../utils/index.js";
|
10 |
+
import axios, { AxiosError } from "axios";
|
11 |
+
import createHttpError, { type HttpError } from "http-errors";
|
12 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
13 |
+
import type { ScrapedProducerAnime } from "../types/parsers/index.js";
|
14 |
+
|
15 |
+
// /anime/producer/${name}?page=${page}
|
16 |
+
async function scrapeProducerAnimes(
|
17 |
+
producerName: string,
|
18 |
+
page: number = 1
|
19 |
+
): Promise<ScrapedProducerAnime | HttpError> {
|
20 |
+
const res: ScrapedProducerAnime = {
|
21 |
+
producerName,
|
22 |
+
animes: [],
|
23 |
+
top10Animes: {
|
24 |
+
today: [],
|
25 |
+
week: [],
|
26 |
+
month: [],
|
27 |
+
},
|
28 |
+
topAiringAnimes: [],
|
29 |
+
totalPages: 1,
|
30 |
+
hasNextPage: false,
|
31 |
+
currentPage: Number(page),
|
32 |
+
};
|
33 |
+
|
34 |
+
try {
|
35 |
+
const producerUrl: URL = new URL(
|
36 |
+
`/producer/${producerName}?page=${page}`,
|
37 |
+
SRC_BASE_URL
|
38 |
+
);
|
39 |
+
|
40 |
+
const mainPage = await axios.get(producerUrl.href, {
|
41 |
+
headers: {
|
42 |
+
Accept: ACCEPT_HEADER,
|
43 |
+
"User-Agent": USER_AGENT_HEADER,
|
44 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
45 |
+
},
|
46 |
+
});
|
47 |
+
|
48 |
+
const $: CheerioAPI = load(mainPage.data);
|
49 |
+
|
50 |
+
const animeSelector: SelectorType =
|
51 |
+
"#main-content .tab-content .film_list-wrap .flw-item";
|
52 |
+
|
53 |
+
res.hasNextPage =
|
54 |
+
$(".pagination > li").length > 0
|
55 |
+
? $(".pagination li.active").length > 0
|
56 |
+
? $(".pagination > li").last().hasClass("active")
|
57 |
+
? false
|
58 |
+
: true
|
59 |
+
: false
|
60 |
+
: false;
|
61 |
+
|
62 |
+
res.totalPages =
|
63 |
+
Number(
|
64 |
+
$('.pagination > .page-item a[title="Last"]')
|
65 |
+
?.attr("href")
|
66 |
+
?.split("=")
|
67 |
+
.pop() ??
|
68 |
+
$('.pagination > .page-item a[title="Next"]')
|
69 |
+
?.attr("href")
|
70 |
+
?.split("=")
|
71 |
+
.pop() ??
|
72 |
+
$(".pagination > .page-item.active a")?.text()?.trim()
|
73 |
+
) || 1;
|
74 |
+
|
75 |
+
res.animes = extractAnimes($, animeSelector);
|
76 |
+
|
77 |
+
if (res.animes.length === 0 && !res.hasNextPage) {
|
78 |
+
res.totalPages = 0;
|
79 |
+
}
|
80 |
+
|
81 |
+
const producerNameSelector: SelectorType =
|
82 |
+
"#main-content .block_area .block_area-header .cat-heading";
|
83 |
+
res.producerName = $(producerNameSelector)?.text()?.trim() ?? producerName;
|
84 |
+
|
85 |
+
const top10AnimeSelector: SelectorType =
|
86 |
+
'#main-sidebar .block_area-realtime [id^="top-viewed-"]';
|
87 |
+
|
88 |
+
$(top10AnimeSelector).each((_, el) => {
|
89 |
+
const period = $(el).attr("id")?.split("-")?.pop()?.trim();
|
90 |
+
|
91 |
+
if (period === "day") {
|
92 |
+
res.top10Animes.today = extractTop10Animes($, period);
|
93 |
+
return;
|
94 |
+
}
|
95 |
+
if (period === "week") {
|
96 |
+
res.top10Animes.week = extractTop10Animes($, period);
|
97 |
+
return;
|
98 |
+
}
|
99 |
+
if (period === "month") {
|
100 |
+
res.top10Animes.month = extractTop10Animes($, period);
|
101 |
+
}
|
102 |
+
});
|
103 |
+
|
104 |
+
const topAiringSelector: SelectorType =
|
105 |
+
"#main-sidebar .block_area_sidebar:nth-child(2) .block_area-content .anif-block-ul ul li";
|
106 |
+
res.topAiringAnimes = extractMostPopularAnimes($, topAiringSelector);
|
107 |
+
|
108 |
+
return res;
|
109 |
+
} catch (err: any) {
|
110 |
+
if (err instanceof AxiosError) {
|
111 |
+
throw createHttpError(
|
112 |
+
err?.response?.status || 500,
|
113 |
+
err?.response?.statusText || "Something went wrong"
|
114 |
+
);
|
115 |
+
}
|
116 |
+
throw createHttpError.InternalServerError(err?.message);
|
117 |
+
}
|
118 |
+
}
|
119 |
+
|
120 |
+
export default scrapeProducerAnimes;
|
src/parsers/animeSearch.ts
ADDED
@@ -0,0 +1,118 @@
|
1 |
+
import {
|
2 |
+
SRC_SEARCH_URL,
|
3 |
+
ACCEPT_HEADER,
|
4 |
+
USER_AGENT_HEADER,
|
5 |
+
ACCEPT_ENCODING_HEADER,
|
6 |
+
extractAnimes,
|
7 |
+
getSearchFilterValue,
|
8 |
+
extractMostPopularAnimes,
|
9 |
+
getSearchDateFilterValue,
|
10 |
+
} from "../utils/index.js";
|
11 |
+
import axios, { AxiosError } from "axios";
|
12 |
+
import createHttpError, { type HttpError } from "http-errors";
|
13 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
14 |
+
import type { ScrapedAnimeSearchResult } from "../types/parsers/index.js";
|
15 |
+
import type { SearchFilters, FilterKeys } from "../types/controllers/index.js";
|
16 |
+
|
17 |
+
// /anime/search?q=${query}&page=${page}
|
18 |
+
async function scrapeAnimeSearch(
|
19 |
+
q: string,
|
20 |
+
page: number = 1,
|
21 |
+
filters: SearchFilters
|
22 |
+
): Promise<ScrapedAnimeSearchResult | HttpError> {
|
23 |
+
const res: ScrapedAnimeSearchResult = {
|
24 |
+
animes: [],
|
25 |
+
mostPopularAnimes: [],
|
26 |
+
currentPage: Number(page),
|
27 |
+
hasNextPage: false,
|
28 |
+
totalPages: 1,
|
29 |
+
searchQuery: q,
|
30 |
+
searchFilters: filters,
|
31 |
+
};
|
32 |
+
|
33 |
+
try {
|
34 |
+
const url = new URL(SRC_SEARCH_URL);
|
35 |
+
url.searchParams.set("keyword", q);
|
36 |
+
url.searchParams.set("page", `${page}`);
|
37 |
+
url.searchParams.set("sort", "default");
|
38 |
+
|
39 |
+
for (const key in filters) {
|
40 |
+
if (key.includes("_date")) {
|
41 |
+
const dates = getSearchDateFilterValue(
|
42 |
+
key === "start_date",
|
43 |
+
filters[key as keyof SearchFilters] || ""
|
44 |
+
);
|
45 |
+
if (!dates) continue;
|
46 |
+
|
47 |
+
dates.map((dateParam) => {
|
48 |
+
const [key, val] = dateParam.split("=");
|
49 |
+
url.searchParams.set(key, val);
|
50 |
+
});
|
51 |
+
continue;
|
52 |
+
}
|
53 |
+
|
54 |
+
const filterVal = getSearchFilterValue(
|
55 |
+
key as FilterKeys,
|
56 |
+
filters[key as keyof SearchFilters] || ""
|
57 |
+
);
|
58 |
+
filterVal && url.searchParams.set(key, filterVal);
|
59 |
+
}
|
60 |
+
|
61 |
+
const mainPage = await axios.get(url.href, {
|
62 |
+
headers: {
|
63 |
+
"User-Agent": USER_AGENT_HEADER,
|
64 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
65 |
+
Accept: ACCEPT_HEADER,
|
66 |
+
},
|
67 |
+
});
|
68 |
+
|
69 |
+
const $: CheerioAPI = load(mainPage.data);
|
70 |
+
|
71 |
+
const selector: SelectorType =
|
72 |
+
"#main-content .tab-content .film_list-wrap .flw-item";
|
73 |
+
|
74 |
+
res.hasNextPage =
|
75 |
+
$(".pagination > li").length > 0
|
76 |
+
? $(".pagination li.active").length > 0
|
77 |
+
? $(".pagination > li").last().hasClass("active")
|
78 |
+
? false
|
79 |
+
: true
|
80 |
+
: false
|
81 |
+
: false;
|
82 |
+
|
83 |
+
res.totalPages =
|
84 |
+
Number(
|
85 |
+
$('.pagination > .page-item a[title="Last"]')
|
86 |
+
?.attr("href")
|
87 |
+
?.split("=")
|
88 |
+
.pop() ??
|
89 |
+
$('.pagination > .page-item a[title="Next"]')
|
90 |
+
?.attr("href")
|
91 |
+
?.split("=")
|
92 |
+
.pop() ??
|
93 |
+
$(".pagination > .page-item.active a")?.text()?.trim()
|
94 |
+
) || 1;
|
95 |
+
|
96 |
+
res.animes = extractAnimes($, selector);
|
97 |
+
|
98 |
+
if (res.animes.length === 0 && !res.hasNextPage) {
|
99 |
+
res.totalPages = 0;
|
100 |
+
}
|
101 |
+
|
102 |
+
const mostPopularSelector: SelectorType =
|
103 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-realtime .anif-block-ul ul li";
|
104 |
+
res.mostPopularAnimes = extractMostPopularAnimes($, mostPopularSelector);
|
105 |
+
|
106 |
+
return res;
|
107 |
+
} catch (err: any) {
|
108 |
+
if (err instanceof AxiosError) {
|
109 |
+
throw createHttpError(
|
110 |
+
err?.response?.status || 500,
|
111 |
+
err?.response?.statusText || "Something went wrong"
|
112 |
+
);
|
113 |
+
}
|
114 |
+
throw createHttpError.InternalServerError(err?.message);
|
115 |
+
}
|
116 |
+
}
|
117 |
+
|
118 |
+
export default scrapeAnimeSearch;
|
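Usage sketch, assuming all `SearchFilters` keys are optional (the parser already treats each filter as optional via `filters[key] || ""`); `start_date`/`end_date` keys are routed through the date-filter branch, everything else through `getSearchFilterValue`:

```ts
import { scrapeAnimeSearch } from "./src/parsers/index.js";

// An empty filter object searches with the site's defaults.
const result = await scrapeAnimeSearch("one piece", 1, {});

if ("animes" in result) {
  console.log(result.totalPages, result.hasNextPage);
  console.log(result.animes.map((a) => a.name));
}
```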
src/parsers/animeSearchSuggestion.ts
ADDED
@@ -0,0 +1,77 @@
|
1 |
+
import {
|
2 |
+
SRC_HOME_URL,
|
3 |
+
SRC_AJAX_URL,
|
4 |
+
USER_AGENT_HEADER,
|
5 |
+
ACCEPT_ENCODING_HEADER,
|
6 |
+
} from "../utils/index.js";
|
7 |
+
import axios, { AxiosError } from "axios";
|
8 |
+
import createHttpError, { type HttpError } from "http-errors";
|
9 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
10 |
+
import type { ScrapedAnimeSearchSuggestion } from "../types/parsers/index.js";
|
11 |
+
|
12 |
+
// /anime/search/suggest?q=${query}
|
13 |
+
async function scrapeAnimeSearchSuggestion(
|
14 |
+
q: string
|
15 |
+
): Promise<ScrapedAnimeSearchSuggestion | HttpError> {
|
16 |
+
const res: ScrapedAnimeSearchSuggestion = {
|
17 |
+
suggestions: [],
|
18 |
+
};
|
19 |
+
|
20 |
+
try {
|
21 |
+
const { data } = await axios.get(
|
22 |
+
`${SRC_AJAX_URL}/search/suggest?keyword=${encodeURIComponent(q)}`,
|
23 |
+
{
|
24 |
+
headers: {
|
25 |
+
Accept: "*/*",
|
26 |
+
Pragma: "no-cache",
|
27 |
+
Referer: SRC_HOME_URL,
|
28 |
+
"User-Agent": USER_AGENT_HEADER,
|
29 |
+
"X-Requested-With": "XMLHttpRequest",
|
30 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
31 |
+
},
|
32 |
+
}
|
33 |
+
);
|
34 |
+
|
35 |
+
const $: CheerioAPI = load(data.html);
|
36 |
+
const selector: SelectorType = ".nav-item:has(.film-poster)";
|
37 |
+
|
38 |
+
if ($(selector).length < 1) return res;
|
39 |
+
|
40 |
+
$(selector).each((_, el) => {
|
41 |
+
const id = $(el).attr("href")?.split("?")[0].includes("javascript")
|
42 |
+
? null
|
43 |
+
: $(el).attr("href")?.split("?")[0]?.slice(1);
|
44 |
+
|
45 |
+
res.suggestions.push({
|
46 |
+
id,
|
47 |
+
name: $(el).find(".srp-detail .film-name")?.text()?.trim() || null,
|
48 |
+
jname:
|
49 |
+
$(el).find(".srp-detail .film-name")?.attr("data-jname")?.trim() ||
|
50 |
+
$(el).find(".srp-detail .alias-name")?.text()?.trim() ||
|
51 |
+
null,
|
52 |
+
poster: $(el)
|
53 |
+
.find(".film-poster .film-poster-img")
|
54 |
+
?.attr("data-src")
|
55 |
+
?.trim(),
|
56 |
+
moreInfo: [
|
57 |
+
...$(el)
|
58 |
+
.find(".film-infor")
|
59 |
+
.contents()
|
60 |
+
.map((_, el) => $(el).text().trim()),
|
61 |
+
].filter((i) => i),
|
62 |
+
});
|
63 |
+
});
|
64 |
+
|
65 |
+
return res;
|
66 |
+
} catch (err: any) {
|
67 |
+
if (err instanceof AxiosError) {
|
68 |
+
throw createHttpError(
|
69 |
+
err?.response?.status || 500,
|
70 |
+
err?.response?.statusText || "Something went wrong"
|
71 |
+
);
|
72 |
+
}
|
73 |
+
throw createHttpError.InternalServerError(err?.message);
|
74 |
+
}
|
75 |
+
}
|
76 |
+
|
77 |
+
export default scrapeAnimeSearchSuggestion;
|
src/parsers/episodeServers.ts
ADDED
@@ -0,0 +1,75 @@
|
1 |
+
import {
|
2 |
+
SRC_BASE_URL,
|
3 |
+
SRC_AJAX_URL,
|
4 |
+
ACCEPT_HEADER,
|
5 |
+
USER_AGENT_HEADER,
|
6 |
+
ACCEPT_ENCODING_HEADER,
|
7 |
+
} from "../utils/index.js";
|
8 |
+
import axios, { AxiosError } from "axios";
|
9 |
+
import createHttpError, { type HttpError } from "http-errors";
|
10 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
11 |
+
import type { ScrapedEpisodeServers } from "../types/parsers/index.js";
|
12 |
+
|
13 |
+
// /anime/servers?episodeId=${id}
|
14 |
+
async function scrapeEpisodeServers(
|
15 |
+
episodeId: string
|
16 |
+
): Promise<ScrapedEpisodeServers | HttpError> {
|
17 |
+
const res: ScrapedEpisodeServers = {
|
18 |
+
sub: [],
|
19 |
+
dub: [],
|
20 |
+
episodeId,
|
21 |
+
episodeNo: 0,
|
22 |
+
};
|
23 |
+
|
24 |
+
try {
|
25 |
+
const epId = episodeId.split("?ep=")[1];
|
26 |
+
|
27 |
+
const { data } = await axios.get(
|
28 |
+
`${SRC_AJAX_URL}/v2/episode/servers?episodeId=${epId}`,
|
29 |
+
{
|
30 |
+
headers: {
|
31 |
+
Accept: ACCEPT_HEADER,
|
32 |
+
"User-Agent": USER_AGENT_HEADER,
|
33 |
+
"X-Requested-With": "XMLHttpRequest",
|
34 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
35 |
+
Referer: new URL(`/watch/${episodeId}`, SRC_BASE_URL).href,
|
36 |
+
},
|
37 |
+
}
|
38 |
+
);
|
39 |
+
|
40 |
+
const $: CheerioAPI = load(data.html);
|
41 |
+
|
42 |
+
const epNoSelector: SelectorType = ".server-notice strong";
|
43 |
+
res.episodeNo = Number($(epNoSelector).text().split(" ").pop()) || 0;
|
44 |
+
|
45 |
+
$(`.ps_-block.ps_-block-sub.servers-sub .ps__-list .server-item`).each(
|
46 |
+
(_, el) => {
|
47 |
+
res.sub.push({
|
48 |
+
serverName: $(el).find("a").text().toLowerCase().trim(),
|
49 |
+
serverId: Number($(el)?.attr("data-server-id")?.trim()) || null,
|
50 |
+
});
|
51 |
+
}
|
52 |
+
);
|
53 |
+
|
54 |
+
$(`.ps_-block.ps_-block-sub.servers-dub .ps__-list .server-item`).each(
|
55 |
+
(_, el) => {
|
56 |
+
res.dub.push({
|
57 |
+
serverName: $(el).find("a").text().toLowerCase().trim(),
|
58 |
+
serverId: Number($(el)?.attr("data-server-id")?.trim()) || null,
|
59 |
+
});
|
60 |
+
}
|
61 |
+
);
|
62 |
+
|
63 |
+
return res;
|
64 |
+
} catch (err: any) {
|
65 |
+
if (err instanceof AxiosError) {
|
66 |
+
throw createHttpError(
|
67 |
+
err?.response?.status || 500,
|
68 |
+
err?.response?.statusText || "Something went wrong"
|
69 |
+
);
|
70 |
+
}
|
71 |
+
throw createHttpError.InternalServerError(err?.message);
|
72 |
+
}
|
73 |
+
}
|
74 |
+
|
75 |
+
export default scrapeEpisodeServers;
|
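Usage sketch; the function only needs the part after `?ep=`, but accepts the full episode id and builds the watch-page Referer from it:

```ts
import { scrapeEpisodeServers } from "./src/parsers/index.js";

const servers = await scrapeEpisodeServers("steinsgate-3?ep=230");

if ("sub" in servers) {
  console.log(servers.episodeNo);
  // Entries look like { serverName, serverId }; the ids map to extractors as noted
  // in animeEpisodeSrcs.ts (4 -> vidstreaming, 1 -> rapidcloud, 5 -> streamsb, 3 -> streamtape).
  console.log(servers.sub, servers.dub);
}
```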
src/parsers/estimatedSchedule.ts
ADDED
@@ -0,0 +1,67 @@
|
1 |
+
import {
|
2 |
+
SRC_HOME_URL,
|
3 |
+
SRC_AJAX_URL,
|
4 |
+
USER_AGENT_HEADER,
|
5 |
+
ACCEPT_ENCODING_HEADER,
|
6 |
+
} from "../utils/index.js";
|
7 |
+
import axios, { AxiosError } from "axios";
|
8 |
+
import createHttpError, { type HttpError } from "http-errors";
|
9 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
10 |
+
import { type ScrapedEstimatedSchedule } from "../types/parsers/index.js";
|
11 |
+
|
12 |
+
// /anime/schedule?date=${date}
|
13 |
+
async function scrapeEstimatedSchedule(
|
14 |
+
date: string
|
15 |
+
): Promise<ScrapedEstimatedSchedule | HttpError> {
|
16 |
+
const res: ScrapedEstimatedSchedule = {
|
17 |
+
scheduledAnimes: [],
|
18 |
+
};
|
19 |
+
|
20 |
+
try {
|
21 |
+
const estScheduleURL =
|
22 |
+
`${SRC_AJAX_URL}/schedule/list?tzOffset=-330&date=${date}` as const;
|
23 |
+
|
24 |
+
const mainPage = await axios.get(estScheduleURL, {
|
25 |
+
headers: {
|
26 |
+
Accept: "*/*",
|
27 |
+
Referer: SRC_HOME_URL,
|
28 |
+
"User-Agent": USER_AGENT_HEADER,
|
29 |
+
"X-Requested-With": "XMLHttpRequest",
|
30 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
31 |
+
},
|
32 |
+
});
|
33 |
+
|
34 |
+
const $: CheerioAPI = load(mainPage?.data?.html);
|
35 |
+
|
36 |
+
const selector: SelectorType = "li";
|
37 |
+
|
38 |
+
if ($(selector)?.text()?.trim()?.includes("No data to display")) {
|
39 |
+
return res;
|
40 |
+
}
|
41 |
+
|
42 |
+
$(selector).each((_, el) => {
|
43 |
+
res.scheduledAnimes.push({
|
44 |
+
id: $(el)?.find("a")?.attr("href")?.slice(1)?.trim() || null,
|
45 |
+
time: $(el)?.find("a .time")?.text()?.trim() || null,
|
46 |
+
name: $(el)?.find("a .film-name.dynamic-name")?.text()?.trim() || null,
|
47 |
+
jname:
|
48 |
+
$(el)
|
49 |
+
?.find("a .film-name.dynamic-name")
|
50 |
+
?.attr("data-jname")
|
51 |
+
?.trim() || null,
|
52 |
+
});
|
53 |
+
});
|
54 |
+
|
55 |
+
return res;
|
56 |
+
} catch (err: any) {
|
57 |
+
if (err instanceof AxiosError) {
|
58 |
+
throw createHttpError(
|
59 |
+
err?.response?.status || 500,
|
60 |
+
err?.response?.statusText || "Something went wrong"
|
61 |
+
);
|
62 |
+
}
|
63 |
+
throw createHttpError.InternalServerError(err?.message);
|
64 |
+
}
|
65 |
+
}
|
66 |
+
|
67 |
+
export default scrapeEstimatedSchedule;
|
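Usage sketch; the AJAX call hard-codes `tzOffset=-330` (UTC+05:30), so returned times are IST regardless of the caller, and the `date` argument is forwarded verbatim (assumed here to be a `yyyy-mm-dd` string):

```ts
import { scrapeEstimatedSchedule } from "./src/parsers/index.js";

const schedule = await scrapeEstimatedSchedule("2024-03-25"); // assumed yyyy-mm-dd

if ("scheduledAnimes" in schedule) {
  for (const { time, name, id } of schedule.scheduledAnimes) {
    console.log(time, name, id); // times are in IST due to the fixed -330 offset
  }
}
```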
src/parsers/homePage.ts
ADDED
@@ -0,0 +1,203 @@
|
1 |
+
import {
|
2 |
+
SRC_HOME_URL,
|
3 |
+
ACCEPT_HEADER,
|
4 |
+
USER_AGENT_HEADER,
|
5 |
+
ACCEPT_ENCODING_HEADER,
|
6 |
+
extractTop10Animes,
|
7 |
+
extractAnimes,
|
8 |
+
} from "../utils/index.js";
|
9 |
+
import axios, { AxiosError } from "axios";
|
10 |
+
import createHttpError, { type HttpError } from "http-errors";
|
11 |
+
import type { ScrapedHomePage } from "../types/parsers/index.js";
|
12 |
+
import { load, type CheerioAPI, type SelectorType } from "cheerio";
|
13 |
+
|
14 |
+
// /anime/home
|
15 |
+
async function scrapeHomePage(): Promise<ScrapedHomePage | HttpError> {
|
16 |
+
const res: ScrapedHomePage = {
|
17 |
+
spotlightAnimes: [],
|
18 |
+
trendingAnimes: [],
|
19 |
+
latestEpisodeAnimes: [],
|
20 |
+
topUpcomingAnimes: [],
|
21 |
+
top10Animes: {
|
22 |
+
today: [],
|
23 |
+
week: [],
|
24 |
+
month: [],
|
25 |
+
},
|
26 |
+
topAiringAnimes: [],
|
27 |
+
genres: [],
|
28 |
+
};
|
29 |
+
|
30 |
+
try {
|
31 |
+
const mainPage = await axios.get(SRC_HOME_URL as string, {
|
32 |
+
headers: {
|
33 |
+
"User-Agent": USER_AGENT_HEADER,
|
34 |
+
"Accept-Encoding": ACCEPT_ENCODING_HEADER,
|
35 |
+
Accept: ACCEPT_HEADER,
|
36 |
+
},
|
37 |
+
});
|
38 |
+
|
39 |
+
const $: CheerioAPI = load(mainPage.data);
|
40 |
+
|
41 |
+
const spotlightSelector: SelectorType =
|
42 |
+
"#slider .swiper-wrapper .swiper-slide";
|
43 |
+
|
44 |
+
$(spotlightSelector).each((i, el) => {
|
45 |
+
const otherInfo = $(el)
|
46 |
+
.find(".deslide-item-content .sc-detail .scd-item")
|
47 |
+
.map((i, el) => $(el).text().trim())
|
48 |
+
.get()
|
49 |
+
.slice(0, -1);
|
50 |
+
|
51 |
+
res.spotlightAnimes.push({
|
52 |
+
rank:
|
53 |
+
Number(
|
54 |
+
$(el)
|
55 |
+
.find(".deslide-item-content .desi-sub-text")
|
56 |
+
?.text()
|
57 |
+
.trim()
|
58 |
+
.split(" ")[0]
|
59 |
+
.slice(1)
|
60 |
+
) || null,
|
61 |
+
id: $(el)
|
62 |
+
.find(".deslide-item-content .desi-buttons a")
|
63 |
+
?.last()
|
64 |
+
?.attr("href")
|
65 |
+
?.slice(1)
|
66 |
+
?.trim(),
|
67 |
+
name: $(el)
|
68 |
+
.find(".deslide-item-content .desi-head-title.dynamic-name")
|
69 |
+
?.text()
|
70 |
+
.trim(),
|
71 |
+
description: $(el)
|
72 |
+
.find(".deslide-item-content .desi-description")
|
73 |
+
?.text()
|
74 |
+
?.split("[")
|
75 |
+
?.shift()
|
76 |
+
?.trim(),
|
77 |
+
poster: $(el)
|
78 |
+
.find(".deslide-cover .deslide-cover-img .film-poster-img")
|
79 |
+
?.attr("data-src")
|
80 |
+
?.trim(),
|
81 |
+
jname: $(el)
|
82 |
+
.find(".deslide-item-content .desi-head-title.dynamic-name")
|
83 |
+
?.attr("data-jname")
|
84 |
+
?.trim(),
|
85 |
+
episodes: {
|
86 |
+
sub:
|
87 |
+
Number(
|
88 |
+
$(el)
|
89 |
+
.find(
|
90 |
+
".deslide-item-content .sc-detail .scd-item .tick-item.tick-sub"
|
91 |
+
)
|
92 |
+
?.text()
|
93 |
+
?.trim()
|
94 |
+
) || null,
|
95 |
+
dub:
|
96 |
+
Number(
|
97 |
+
$(el)
|
98 |
+
.find(
|
99 |
+
".deslide-item-content .sc-detail .scd-item .tick-item.tick-dub"
|
100 |
+
)
|
101 |
+
?.text()
|
102 |
+
?.trim()
|
103 |
+
) || null,
|
104 |
+
},
|
105 |
+
otherInfo,
|
106 |
+
});
|
107 |
+
});
|
108 |
+
|
109 |
+
const trendingSelector: SelectorType =
|
110 |
+
"#trending-home .swiper-wrapper .swiper-slide";
|
111 |
+
|
112 |
+
$(trendingSelector).each((i, el) => {
|
113 |
+
res.trendingAnimes.push({
|
114 |
+
rank: parseInt(
|
115 |
+
$(el).find(".item .number")?.children()?.first()?.text()?.trim()
|
116 |
+
),
|
117 |
+
name: $(el)
|
118 |
+
.find(".item .number .film-title.dynamic-name")
|
119 |
+
?.text()
|
120 |
+
?.trim(),
|
121 |
+
id: $(el).find(".item .film-poster")?.attr("href")?.slice(1)?.trim(),
|
122 |
+
poster: $(el)
|
123 |
+
.find(".item .film-poster .film-poster-img")
|
124 |
+
?.attr("data-src")
|
125 |
+
?.trim(),
|
126 |
+
});
|
127 |
+
});
|
128 |
+
|
129 |
+
const latestEpisodeSelector: SelectorType =
|
130 |
+
"#main-content .block_area_home:nth-of-type(1) .tab-content .film_list-wrap .flw-item";
|
131 |
+
res.latestEpisodeAnimes = extractAnimes($, latestEpisodeSelector);
|
132 |
+
|
133 |
+
const topUpcomingSelector: SelectorType =
|
134 |
+
"#main-content .block_area_home:nth-of-type(3) .tab-content .film_list-wrap .flw-item";
|
135 |
+
res.topUpcomingAnimes = extractAnimes($, topUpcomingSelector);
|
136 |
+
|
137 |
+
const genreSelector: SelectorType =
|
138 |
+
"#main-sidebar .block_area.block_area_sidebar.block_area-genres .sb-genre-list li";
|
139 |
+
$(genreSelector).each((i, el) => {
|
140 |
+
res.genres.push(`${$(el).text().trim()}`);
|
141 |
+
});
|
142 |
+
|
143 |
+
const mostViewedSelector: SelectorType =
|
144 |
+
'#main-sidebar .block_area-realtime [id^="top-viewed-"]';
|
145 |
+
$(mostViewedSelector).each((i, el) => {
|
146 |
+
const period = $(el).attr("id")?.split("-")?.pop()?.trim();
|
147 |
+
|
148 |
+
if (period === "day") {
|
149 |
+
res.top10Animes.today = extractTop10Animes($, period);
|
150 |
+
return;
|
151 |
+
}
|
152 |
+
if (period === "week") {
|
153 |
+
res.top10Animes.week = extractTop10Animes($, period);
|
154 |
+
return;
|
155 |
+
}
|
156 |
+
if (period === "month") {
|
157 |
+
res.top10Animes.month = extractTop10Animes($, period);
|
158 |
+
}
|
159 |
+
});
|
160 |
+
|
161 |
+
const topAiringSelector: SelectorType =
|
162 |
+
"#anime-featured .row div:nth-of-type(1) .anif-block-ul ul li";
|
163 |
+
$(topAiringSelector).each((i, el) => {
|
164 |
+
const otherInfo = $(el)
|
165 |
+
.find(".fd-infor .fdi-item")
|
166 |
+
.map((i, el) => $(el).text().trim())
|
167 |
+
.get();
|
168 |
+
|
169 |
+
res.topAiringAnimes.push({
|
170 |
+
id: $(el)
|
171 |
+
.find(".film-detail .film-name .dynamic-name")
|
172 |
+
?.attr("href")
|
173 |
+
?.slice(1)
|
174 |
+
?.trim(),
|
175 |
+
name: $(el)
|
176 |
+
.find(".film-detail .film-name .dynamic-name")
|
177 |
+
?.attr("title")
|
178 |
+
?.trim(),
|
179 |
+
jname: $(el)
|
180 |
+
.find(".film-detail .film-name .dynamic-name")
|
181 |
+
?.attr("data-jname")
|
182 |
+
?.trim(),
|
183 |
+
poster: $(el)
|
184 |
+
.find(".film-poster a .film-poster-img")
|
185 |
+
?.attr("data-src")
|
186 |
+
?.trim(),
|
187 |
+
otherInfo,
|
188 |
+
});
|
189 |
+
});
|
190 |
+
|
191 |
+
return res;
|
192 |
+
} catch (err: any) {
|
193 |
+
if (err instanceof AxiosError) {
|
194 |
+
throw createHttpError(
|
195 |
+
err?.response?.status || 500,
|
196 |
+
err?.response?.statusText || "Something went wrong"
|
197 |
+
);
|
198 |
+
}
|
199 |
+
throw createHttpError.InternalServerError(err?.message);
|
200 |
+
}
|
201 |
+
}
|
202 |
+
|
203 |
+
export default scrapeHomePage;
|
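Usage sketch for the home-page scraper, which aggregates every section in a single request:

```ts
import { scrapeHomePage } from "./src/parsers/index.js";

const home = await scrapeHomePage();

if ("spotlightAnimes" in home) {
  console.log(home.spotlightAnimes.length, "spotlight entries");
  console.log(home.top10Animes.today.map((a) => a.name)); // today's top 10
  console.log(home.genres);                               // sidebar genre list
}
```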
src/parsers/index.ts
ADDED
@@ -0,0 +1,25 @@
|
1 |
+
import scrapeHomePage from "./homePage.js";
|
2 |
+
import scrapeGenreAnime from "./animeGenre.js";
|
3 |
+
import scrapeAnimeSearch from "./animeSearch.js";
|
4 |
+
import scrapeAnimeEpisodes from "./animeEpisodes.js";
|
5 |
+
import scrapeAnimeCategory from "./animeCategory.js";
|
6 |
+
import scrapeProducerAnimes from "./animeProducer.js";
|
7 |
+
import scrapeEpisodeServers from "./episodeServers.js";
|
8 |
+
import scrapeAnimeAboutInfo from "./animeAboutInfo.js";
|
9 |
+
import scrapeEstimatedSchedule from "./estimatedSchedule.js";
|
10 |
+
import scrapeAnimeEpisodeSources from "./animeEpisodeSrcs.js";
|
11 |
+
import scrapeAnimeSearchSuggestion from "./animeSearchSuggestion.js";
|
12 |
+
|
13 |
+
export {
|
14 |
+
scrapeHomePage,
|
15 |
+
scrapeGenreAnime,
|
16 |
+
scrapeAnimeSearch,
|
17 |
+
scrapeAnimeEpisodes,
|
18 |
+
scrapeAnimeCategory,
|
19 |
+
scrapeEpisodeServers,
|
20 |
+
scrapeProducerAnimes,
|
21 |
+
scrapeAnimeAboutInfo,
|
22 |
+
scrapeEstimatedSchedule,
|
23 |
+
scrapeAnimeEpisodeSources,
|
24 |
+
scrapeAnimeSearchSuggestion,
|
25 |
+
};
|
src/routes/index.ts
ADDED
@@ -0,0 +1,55 @@
|
1 |
+
import { Router, type IRouter } from "express";
|
2 |
+
import {
|
3 |
+
getGenreAnime,
|
4 |
+
getAnimeSearch,
|
5 |
+
getHomePageInfo,
|
6 |
+
getAnimeCategory,
|
7 |
+
getAnimeEpisodes,
|
8 |
+
getEpisodeServers,
|
9 |
+
getProducerAnimes,
|
10 |
+
getAnimeAboutInfo,
|
11 |
+
getEstimatedSchedule,
|
12 |
+
getAnimeEpisodeSources,
|
13 |
+
getAnimeSearchSuggestion,
|
14 |
+
} from "../controllers/index.js";
|
15 |
+
|
16 |
+
const router: IRouter = Router();
|
17 |
+
|
18 |
+
// /anime
|
19 |
+
router.get("/", (_, res) => res.redirect("/"));
|
20 |
+
|
21 |
+
// /anime/home
|
22 |
+
router.get("/home", getHomePageInfo);
|
23 |
+
|
24 |
+
// /anime/info?id=${anime-id}
|
25 |
+
router.get("/info", getAnimeAboutInfo);
|
26 |
+
|
27 |
+
// /anime/genre/${name}?page=${page}
|
28 |
+
router.get("/genre/:name", getGenreAnime);
|
29 |
+
|
30 |
+
// /anime/search?q=${query}&page=${page}
|
31 |
+
router.get("/search", getAnimeSearch);
|
32 |
+
|
33 |
+
// /anime/search/suggest?q=${query}
|
34 |
+
router.get("/search/suggest", getAnimeSearchSuggestion);
|
35 |
+
|
36 |
+
// /anime/episodes/${anime-id}
|
37 |
+
router.get("/episodes/:animeId", getAnimeEpisodes);
|
38 |
+
|
39 |
+
// /anime/servers?episodeId=${id}
|
40 |
+
router.get("/servers", getEpisodeServers);
|
41 |
+
|
42 |
+
// episodeId=steinsgate-3?ep=230
|
43 |
+
// /anime/episode-srcs?id=${episodeId}?server=${server}&category=${category (dub or sub)}
|
44 |
+
router.get("/episode-srcs", getAnimeEpisodeSources);
|
45 |
+
|
46 |
+
// /anime/schedule?date=${date}
|
47 |
+
router.get("/schedule", getEstimatedSchedule);
|
48 |
+
|
49 |
+
// /anime/producer/${name}?page=${page}
|
50 |
+
router.get("/producer/:name", getProducerAnimes);
|
51 |
+
|
52 |
+
// /anime/:category?page=${page}
|
53 |
+
router.get("/:category", getAnimeCategory);
|
54 |
+
|
55 |
+
export default router;
|
src/server.ts
ADDED
@@ -0,0 +1,60 @@
|
1 |
+
import https from "https";
|
2 |
+
import morgan from "morgan";
|
3 |
+
import express from "express";
|
4 |
+
import { resolve } from "path";
|
5 |
+
import { config } from "dotenv";
|
6 |
+
|
7 |
+
import corsConfig from "./config/cors.js";
|
8 |
+
import { ratelimit } from "./config/ratelimit.js";
|
9 |
+
import errorHandler from "./config/errorHandler.js";
|
10 |
+
import notFoundHandler from "./config/notFoundHandler.js";
|
11 |
+
|
12 |
+
import animeRouter from "./routes/index.js";
|
13 |
+
|
14 |
+
config();
|
15 |
+
const app: express.Application = express();
|
16 |
+
const PORT: number = Number(process.env.PORT) || 4000;
|
17 |
+
|
18 |
+
app.use(morgan("dev"));
|
19 |
+
app.use(corsConfig);
|
20 |
+
|
21 |
+
// CAUTION: For personal deployments, "refrain" from having an env
|
22 |
+
// named "ANIWATCH_API_HOSTNAME". You may face rate limitting
|
23 |
+
// and other issues if you do.
|
24 |
+
const ISNT_PERSONAL_DEPLOYMENT = Boolean(process?.env?.ANIWATCH_API_HOSTNAME);
|
25 |
+
if (ISNT_PERSONAL_DEPLOYMENT) {
|
26 |
+
app.use(ratelimit);
|
27 |
+
}
|
28 |
+
|
29 |
+
app.use(express.static(resolve("public")));
|
30 |
+
app.get("/health", (_, res) => res.sendStatus(200));
|
31 |
+
app.use("/anime", animeRouter);
|
32 |
+
|
33 |
+
app.use(notFoundHandler);
|
34 |
+
app.use(errorHandler);
|
35 |
+
|
36 |
+
// NOTE: this env is "required" for vercel deployments
|
37 |
+
if (!Boolean(process?.env?.IS_VERCEL_DEPLOYMENT)) {
|
38 |
+
app.listen(PORT, () => {
|
39 |
+
console.log(`⚔️ api @ http://localhost:${PORT}`);
|
40 |
+
});
|
41 |
+
|
42 |
+
// NOTE: remove the `if` block below for personal deployments
|
43 |
+
if (ISNT_PERSONAL_DEPLOYMENT) {
|
44 |
+
// don't sleep
|
45 |
+
const intervalTime = 9 * 60 * 1000; // 9mins
|
46 |
+
setInterval(() => {
|
47 |
+
console.log("HEALTHCHECK ;)", new Date().toLocaleString());
|
48 |
+
https
|
49 |
+
.get(
|
50 |
+
new URL("/health", `https://${process.env.ANIWATCH_API_HOSTNAME}`)
|
51 |
+
.href
|
52 |
+
)
|
53 |
+
.on("error", (err) => {
|
54 |
+
console.error(err.message);
|
55 |
+
});
|
56 |
+
}, intervalTime);
|
57 |
+
}
|
58 |
+
}
|
59 |
+
|
60 |
+
export default app;
|
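Because the configured `app` is exported, and `app.listen` is skipped when `IS_VERCEL_DEPLOYMENT` is set, the server can be exercised in-process. A minimal smoke-test sketch, assuming supertest is available as a dev dependency (which this commit does not imply):

// Hypothetical smoke test; supertest is an assumed dev dependency.
import request from "supertest";

async function smokeTest(): Promise<void> {
  // Set before importing server.js so app.listen is skipped (see the NOTE above).
  process.env.IS_VERCEL_DEPLOYMENT = "true";
  const { default: app } = await import("./server.js");

  await request(app).get("/health").expect(200); // plain health probe
}

smokeTest().catch(console.error);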
src/types/anime.ts
ADDED
@@ -0,0 +1,113 @@
+export interface Anime {
+  id: string | null;
+  name: string | null;
+  poster: string | null;
+  duration: string | null;
+  type: string | null;
+  rating: string | null;
+  episodes: {
+    sub: number | null;
+    dub: number | null;
+  };
+}
+
+type CommonAnimeProps = "id" | "name" | "poster";
+
+export interface Top10Anime extends Pick<Anime, CommonAnimeProps | "episodes"> {
+  rank: number | null;
+}
+
+export type Top10AnimeTimePeriod = "day" | "week" | "month";
+
+export interface MostPopularAnime
+  extends Pick<Anime, CommonAnimeProps | "episodes" | "type"> {
+  jname: string | null;
+}
+
+export interface SpotlightAnime
+  extends MostPopularAnime,
+    Pick<Top10Anime, "rank"> {
+  description: string | null;
+}
+
+export interface TrendingAnime
+  extends Pick<Anime, CommonAnimeProps>,
+    Pick<Top10Anime, "rank"> {}
+
+export interface LatestEpisodeAnime extends Anime {}
+
+export interface TopUpcomingAnime extends Anime {}
+
+export interface TopAiringAnime extends MostPopularAnime {}
+
+export interface AnimeGeneralAboutInfo
+  extends Pick<Anime, CommonAnimeProps>,
+    Pick<SpotlightAnime, "description"> {
+  stats: {
+    quality: string | null;
+  } & Pick<Anime, "duration" | "episodes" | "rating" | "type">;
+}
+
+export interface RecommendedAnime extends Anime {}
+
+export interface RelatedAnime extends MostPopularAnime {}
+
+export interface Season extends Pick<Anime, CommonAnimeProps> {
+  isCurrent: boolean;
+  title: string | null;
+}
+
+export interface AnimeSearchSuggestion
+  extends Omit<MostPopularAnime, "episodes" | "type"> {
+  moreInfo: Array<string>;
+}
+
+export interface AnimeEpisode extends Pick<Season, "title"> {
+  episodeId: string | null;
+  number: number;
+  isFiller: boolean;
+}
+
+export interface SubEpisode {
+  serverName: string;
+  serverId: number | null;
+}
+export interface DubEpisode extends SubEpisode {}
+
+export type AnimeCategories =
+  | "most-favorite"
+  | "most-popular"
+  | "subbed-anime"
+  | "dubbed-anime"
+  | "recently-updated"
+  | "recently-added"
+  | "top-upcoming"
+  | "top-airing"
+  | "movie"
+  | "special"
+  | "ova"
+  | "ona"
+  | "tv"
+  | "completed";
+
+export type AnimeServers =
+  | "vidstreaming"
+  | "megacloud"
+  | "streamsb"
+  | "streamtape"
+  | "vidcloud";
+
+export enum Servers {
+  VidStreaming = "vidstreaming",
+  MegaCloud = "megacloud",
+  StreamSB = "streamsb",
+  StreamTape = "streamtape",
+  VidCloud = "vidcloud",
+  AsianLoad = "asianload",
+  GogoCDN = "gogocdn",
+  MixDrop = "mixdrop",
+  UpCloud = "upcloud",
+  VizCloud = "vizcloud",
+  MyCloud = "mycloud",
+  Filemoon = "filemoon",
+}
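These interfaces model scraped entities with nullable fields wherever a value may be missing from the page. A small illustrative value (the import path and the concrete data are assumptions, not output of the scraper):

// Illustrative only; the data below is made up to show the nullable shape.
import { type Anime, Servers } from "./types/anime.js";

const example: Anime = {
  id: "steinsgate-3",
  name: "Steins;Gate",
  poster: null, // scraper found no poster
  duration: "24m",
  type: "TV",
  rating: null,
  episodes: { sub: 24, dub: 24 },
};

const defaultServer: Servers = Servers.VidStreaming; // enum member, value "vidstreaming"
console.log(example.name, defaultServer);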
src/types/controllers/animeAboutInfo.ts
ADDED
@@ -0,0 +1,3 @@
+export type AnimeAboutInfoQueryParams = {
+  id?: string;
+};
src/types/controllers/animeCategory.ts
ADDED
@@ -0,0 +1,7 @@
+export type CategoryAnimePathParams = {
+  category?: string;
+};
+
+export type CategoryAnimeQueryParams = {
+  page?: string;
+};
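These param types are presumably consumed by the category controller; the sketch below shows how they can type an Express handler (the handler body, default values, and import path are assumptions, not the repository's controller):

// Hypothetical handler using the param types above; not the actual controller.
import type { RequestHandler } from "express";
import type {
  CategoryAnimePathParams,
  CategoryAnimeQueryParams,
} from "./types/controllers/animeCategory.js";

// RequestHandler<Params, ResBody, ReqBody, ReqQuery>
const exampleCategoryHandler: RequestHandler<
  CategoryAnimePathParams,
  unknown,
  unknown,
  CategoryAnimeQueryParams
> = (req, res) => {
  const category = req.params.category ?? "most-popular";
  const page = Number(req.query.page) || 1;
  res.json({ category, page }); // response shape is illustrative
};

export default exampleCategoryHandler;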