Neon-AI committed on
Commit
30a1d76
·
verified ·
1 Parent(s): f92fb28

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -39
app.py CHANGED
@@ -47,63 +47,66 @@ def search_anime(keyword: str):
47
  except Exception as e:
48
  raise HTTPException(status_code=500, detail=f"Failed to search anime: {str(e)}")
49
 
50
@app.get("/api/anime")
def anime_metadata(url: str):
    """Return metadata and all episodes for a given anime page URL on HiAnime.

    Example URL: https://hianime.to/naruto

    Args:
        url: Absolute URL of the anime's page on HiAnime.

    Returns:
        dict with keys: title, description, thumbnail, genres, rating,
        episodes (list of {"episode", "url", "type"}). Missing fields are None.

    Raises:
        HTTPException: 500 when the page cannot be fetched or parsing fails.
    """
    try:
        headers = {"User-Agent": "Mozilla/5.0"}
        # Timeout prevents a stalled upstream from hanging this endpoint.
        resp = requests.get(url, headers=headers, timeout=10)
        if resp.status_code != 200:
            raise HTTPException(status_code=500, detail="Failed to fetch anime page")

        soup = BeautifulSoup(resp.text, "html.parser")

        # Title
        title_elem = soup.select_one("h1.film-name")
        title = title_elem.text.strip() if title_elem else None

        # Description
        desc_elem = soup.select_one(".film-description")
        description = desc_elem.text.strip() if desc_elem else None

        # Thumbnail — parentheses are required: without them the expression
        # parses as `A or (B if cond else None)` and dereferences a missing
        # element; lazy-loaded pages put the real URL in data-src.
        thumb_elem = soup.select_one(".film-poster-img")
        thumbnail = (thumb_elem.get("data-src") or thumb_elem.get("src")) if thumb_elem else None

        # Genres
        genres = [g.text.strip() for g in soup.select(".film-genres a")]

        # Rating
        rating_elem = soup.select_one(".film-rate")
        rating = rating_elem.text.strip() if rating_elem else None

        # Episodes — NOTE(review): selectors assume HiAnime's markup
        # (.film-episodes); verify against the live page.
        ep_list = []
        for ep in soup.select(".film-episodes a"):
            ep_url = ep.get("href")
            # Guard: an anchor without href must not crash the whole request.
            ep_type = "dub" if "dub" in (ep_url or "").lower() else "sub"
            ep_list.append({
                "episode": ep.text.strip(),
                "url": ep_url,
                "type": ep_type,
            })

        return {
            "title": title,
            "description": description,
            "thumbnail": thumbnail,
            "genres": genres,
            "rating": rating,
            "episodes": ep_list
        }

    except HTTPException:
        # Re-raise as-is: the blanket handler below must not rewrap the
        # deliberate 500 raised on a bad upstream status.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get anime metadata: {str(e)}")
107
  # ===== END PATCH =====
108
 
109
  if __name__ == "__main__":
 
47
  except Exception as e:
48
  raise HTTPException(status_code=500, detail=f"Failed to search anime: {str(e)}")
49
 
50
def _labeled_value(soup, label: str):
    """Return the stripped text of the `.name` node following the
    `.item-head` whose text contains *label*, or None if absent.

    NOTE(review): `:contains()` is deprecated in soupsieve in favour of
    `:-soup-contains()` — confirm the installed version still accepts it.
    """
    node = soup.select_one(f".item-head:contains('{label}') + .name")
    return node.text.strip() if node else None


@app.get("/metadata")
def get_metadata(url: str):
    """Return detailed metadata for an anime page on HiAnime.

    Example call: /metadata?url=/naruto-677

    Args:
        url: Site-relative path of the anime page (prefixed with
            https://hianime.to before fetching).

    Returns:
        dict with keys: title, japanese, synonyms, status, type, duration,
        rating, quality, sub_count, dub_count, episodes, genres.
        Fields missing from the page are None (genres: empty list).

    Raises:
        HTTPException: 500 when the page cannot be fetched or parsing fails.
    """
    try:
        full_url = f"https://hianime.to{url}"
        headers = {"User-Agent": "Mozilla/5.0"}
        # Timeout prevents a stalled upstream from hanging this endpoint.
        resp = requests.get(full_url, headers=headers, timeout=10)
        if resp.status_code != 200:
            raise HTTPException(status_code=500, detail="Failed to fetch anime page")

        soup = BeautifulSoup(resp.text, "html.parser")

        # Title / Japanese / Synonyms — look up the title element once
        # instead of running the same selector twice.
        title_elem = soup.select_one("h1.film-name")
        title = title_elem.text.strip() if title_elem else None
        japanese = _labeled_value(soup, "Japanese:")
        synonyms = _labeled_value(soup, "Synonyms:")

        # Status, type, duration — one query serves both positional fields
        # (original code selected `.item span.item` twice).
        status = _labeled_value(soup, "Status:")
        item_spans = soup.select(".item span.item")
        anime_type = item_spans[0].text.strip() if item_spans else None
        duration = item_spans[1].text.strip() if len(item_spans) > 1 else None

        # Rating, quality, sub, dub, episodes — positional; NOTE(review):
        # assumes the page always orders .tick-item this way — verify.
        tick_items = [t.text.strip() for t in soup.select(".tick-item")]
        tick_items += [None] * (5 - len(tick_items))  # pad so indexing is safe
        rating, quality, sub_count, dub_count, episodes = tick_items[:5]

        # Genres
        genres = [g.text.strip() for g in soup.select(".item.item-list a[href^='/genre/']")]

        return {
            "title": title,
            "japanese": japanese,
            "synonyms": synonyms,
            "status": status,
            "type": anime_type,
            "duration": duration,
            "rating": rating,
            "quality": quality,
            "sub_count": sub_count,
            "dub_count": dub_count,
            "episodes": episodes,
            "genres": genres,
        }

    except HTTPException:
        # Re-raise as-is: the blanket handler below must not rewrap the
        # deliberate 500 raised on a bad upstream status.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to fetch metadata: {str(e)}")
110
  # ===== END PATCH =====
111
 
112
  if __name__ == "__main__":