Support agdm.tv
rkwyu committed Apr 25, 2024
1 parent c58eb6f commit 2308c15
Showing 7 changed files with 175 additions and 5 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -10,6 +10,7 @@ It currently supports following websites:
- [x] [yhdm.one](https://yhdm.one/)
- [x] [xgcartoon.com](https://www.xgcartoon.com/)
- [x] [lincartoon.com](https://www.lincartoon.com/)
- [x] [agdm.tv](https://www.agdm.tv/)

Pending support:
- [ ] [kickassanime.mx](https://www1.kickassanime.mx/)
3 changes: 3 additions & 0 deletions anime_dl/anime_dl.py
@@ -3,6 +3,7 @@
from anime_dl.const import regex
from anime_dl.downloader.downloader import Downloader
from anime_dl.downloader.ffmpeg_strategy import FfmpegStrategy
from anime_dl.scrapper.agdm_tv_creator import AgdmTvCreator
from anime_dl.scrapper.anime1_in_creator import Anime1InCreator
from anime_dl.scrapper.anime1_me_creator import Anime1MeCreator
from anime_dl.scrapper.xgcartoon_creator import XgCartoonCreator
@@ -32,6 +33,8 @@ def main(url: str) -> None:
        scrapper = Anime1InCreator()
    elif re.search(regex.URL["yhdm.one"]["domain"], url):
        scrapper = YhdmOneCreator()
    elif re.search(regex.URL["agdm.tv"]["domain"], url):
        scrapper = AgdmTvCreator()
    else:
        raise Exception(f"Unsupported URL: {url}")
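For reference, a minimal standalone sketch of how this URL dispatch works, using an illustrative subset of the domain patterns from `anime_dl/const/regex.py` (shown in the next file); the helper name and sample URL here are not part of the project:

```python
import re

# Illustrative subset of the URL table in anime_dl/const/regex.py
URL = {
    "yhdm.one": {"domain": r"^https?://yhdm\.one/.+$"},
    "agdm.tv": {"domain": r"^https?://agdm\.tv/.+$"},
}


def pick_site(url: str) -> str:
    # return the first site whose domain pattern matches, mirroring the
    # if/elif chain of creators in main()
    for site, patterns in URL.items():
        if re.search(patterns["domain"], url):
            return site
    raise Exception(f"Unsupported URL: {url}")


print(pick_site("https://agdm.tv/play/123-1-4.html"))  # agdm.tv
```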

5 changes: 5 additions & 0 deletions anime_dl/const/regex.py
@@ -26,4 +26,9 @@
"series": "^https?:\/\/yhdm\.one\/vod\/[0-9]+.html$",
"episode": "^https?:\/\/yhdm\.one\/vod-play\/([0-9]+)\/([a-zA-Z0-9]+).html$",
},
"agdm.tv": {
"domain": "^https?:\/\/agdm\.tv\/.+$",
"series": "^https?:\/\/agdm\.tv\/vod\/([0-9]+).html$",
"episode": "^https?:\/\/agdm\.tv\/play\/([0-9]+)-([0-9]+)-([0-9]+).html$",
},
}
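The new `episode` pattern carries three capture groups, which the scrapper later reads as what looks like the series id, the playlist/server index, and the episode slot. A quick sketch of what they yield; the sample URL is made up:

```python
import re

EPISODE = r"^https?:\/\/agdm\.tv\/play\/([0-9]+)-([0-9]+)-([0-9]+).html$"

# sample play URL is hypothetical
m = re.search(EPISODE, "https://agdm.tv/play/123-1-4.html")
if m:
    series_id, server, episode_no = m.groups()
    print(series_id, server, episode_no)  # 123 1 4
```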
2 changes: 1 addition & 1 deletion anime_dl/downloader/ffmpeg_strategy.py
@@ -25,7 +25,7 @@ def download(self, episode: Episode) -> None:
        )
        os.makedirs(os.path.dirname(output), exist_ok=True)
        if os.path.exists(output) is False:
-           logger.info(f"started download: {filename}")
+           logger.info(f"started download: {filename} ({url})")
            stream = ffmpeg.input(url)
            stream = ffmpeg.output(stream, output, vcodec="copy", acodec="copy")
            ffmpeg.run(stream)
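The surrounding download logic remuxes the source with ffmpeg-python rather than re-encoding it; a minimal sketch of the same stream-copy download, where the URL and output path below are placeholders:

```python
import os

import ffmpeg  # ffmpeg-python

url = "https://example.com/stream/playlist.m3u8"  # placeholder source
output = "downloads/Some Show/na/Episode 01.mp4"  # placeholder output path

os.makedirs(os.path.dirname(output), exist_ok=True)
if not os.path.exists(output):
    # copy the video and audio streams as-is, so nothing is re-encoded
    stream = ffmpeg.input(url)
    stream = ffmpeg.output(stream, output, vcodec="copy", acodec="copy")
    ffmpeg.run(stream)
```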
8 changes: 8 additions & 0 deletions anime_dl/scrapper/agdm_tv_creator.py
@@ -0,0 +1,8 @@
from anime_dl.scrapper.creator import Creator
from anime_dl.scrapper.scrapper import Scrapper
from anime_dl.scrapper.agdm_tv_scrapper import AgdmTvScrapper


class AgdmTvCreator(Creator):
    def factory_method(self) -> Scrapper:
        return AgdmTvScrapper()
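Like the other site creators, this follows the factory-method pattern. A hedged usage sketch — the actual call sites in anime_dl.py may wrap this differently, and the series URL below is hypothetical:

```python
from anime_dl.scrapper.agdm_tv_creator import AgdmTvCreator

# assumed usage: the creator builds the concrete scrapper, which exposes
# get_episodes(url); the URL below is hypothetical
scrapper = AgdmTvCreator().factory_method()
episodes = scrapper.get_episodes("https://agdm.tv/vod/123.html")
```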
153 changes: 153 additions & 0 deletions anime_dl/scrapper/agdm_tv_scrapper.py
@@ -0,0 +1,153 @@
import json
import logging
import re
import requests
import typing

from anime_dl.const import regex, general
from anime_dl.object.episode import Episode
from anime_dl.scrapper.scrapper import Scrapper
from anime_dl.utils.logger import Logger
from anime_dl.utils.progress_bar import ProgressBar
from bs4 import BeautifulSoup
from urllib.parse import urlparse, parse_qs

logger = Logger()


class AgdmTvScrapper(Scrapper):
    def get_episodes(self, url: str) -> typing.List[Episode]:
        if re.search(regex.URL["agdm.tv"]["series"], url):
            episodes = self.parse_series(url)
            progress_bar = ProgressBar("Link Fetching", 1, len(episodes))
            for episode in episodes:
                e, x_servers = self.parse_episode(episode.referer_url)
                # check if video source available
                if requests.get(e.video_url, headers=general.REQUEST["header"]).ok:
                    episode = (
                        episode.set_series_name(e.series_name)
                        .set_season(e.season)
                        .set_episode_name(e.episode_name)
                        .set_episode_no(e.episode_no)
                        .set_video_url(e.video_url)
                    )
                # try other servers
                else:
                    logger.warning(f"video source fail: {episode.referer_url} ({e.video_url})")
                    m = re.search(regex.URL["agdm.tv"]["episode"], episode.referer_url)
                    id = int(m.groups()[0])
                    server = int(m.groups()[1])
                    episode_no = m.groups()[2]
                    for x_server in x_servers:
                        x_url = (
                            f"https://agdm.tv/play/{id}-{x_server}-{episode_no}.html"
                        )
                        logger.info(f"try another server: {x_url}")
                        e, _ = self.parse_episode(x_url)
                        if requests.get(
                            e.video_url, headers=general.REQUEST["header"]
                        ).ok:
                            episode = (
                                episode.set_series_name(e.series_name)
                                .set_season(e.season)
                                .set_episode_name(e.episode_name)
                                .set_episode_no(e.episode_no)
                                .set_video_url(e.video_url)
                            )
                        else:
                            logger.warning(f"video source fail: {x_url} ({e.video_url})")
                progress_bar.print()
            return episodes
        elif re.search(regex.URL["agdm.tv"]["episode"], url):
            m = re.search(regex.URL["agdm.tv"]["episode"], url)
            id = int(m.groups()[0])
            server = int(m.groups()[1])
            episode_no = m.groups()[2]
            episode, x_servers = self.parse_episode(url)
            # check if video source available
            if requests.get(episode.video_url, headers=general.REQUEST["header"]).ok:
                return [episode]
            # try other servers
            else:
                logger.warning(f"video source fail: {url} ({episode.video_url})")
                for x_server in x_servers:
                    x_url = f"https://agdm.tv/play/{id}-{x_server}-{episode_no}.html"
                    logger.info(f"try another server: {x_url}")
                    episode, _ = self.parse_episode(x_url)
                    if requests.get(
                        episode.video_url, headers=general.REQUEST["header"]
                    ).ok:
                        return [episode]
                    else:
                        logger.warning(f"video source fail: {x_url} ({episode.video_url})")
                return [Episode()]
        else:
            raise Exception(f"Unsupported URL: {url}")

    def parse_series(self, url: str) -> typing.List[Episode]:
        try:
            episodes = []
            headers = general.REQUEST["header"]
            doc = BeautifulSoup(requests.get(url, headers=headers).text, "html.parser")
            image_src = (
                doc.select_one("a.myui-vodlist__thumb img.lazyload")
                .attrs["src"]
                .strip()
            )
            # select_one for 1st playlist
            content_list = doc.select_one("ul.myui-content__list")
            items = content_list.select("li a")
            for item in items:
                referer_url = "https://agdm.tv" + item.attrs["href"].strip()
                episodes.append(
                    Episode().set_referer_url(referer_url).set_image_src(image_src)
                )
            return episodes
        except Exception as e:
            logger.error(f"{url}: {e}")
            return []

    def parse_episode(self, url: str) -> tuple[Episode, typing.List[int]]:
        try:
            m = re.search(regex.URL["agdm.tv"]["episode"], url)
            id = int(m.groups()[0])
            server = int(m.groups()[1])
            episode_no = m.groups()[2]
            headers = general.REQUEST["header"]
            doc = BeautifulSoup(requests.get(url, headers=headers).text, "html.parser")
            html = str(doc)
            player_aaaa = json.loads(
                html.split("var player_aaaa=")[1]
                .split("</script>")[0]
                .strip()
                .encode()
                .decode("unicode_escape")
            )
            series_name = player_aaaa["vod_data"]["vod_name"]
            episode_name = doc.select_one(
                ".myui-panel_hd small.text-muted"
            ).text.strip()
            video_url = player_aaaa["url"]
            playlists = doc.select("ul.nav-tabs li a[href^='#playlist']")
            x_servers = list(
                filter(
                    lambda s: s != server,
                    map(
                        lambda p: int(p.attrs["href"].replace("#playlist", "")),
                        playlists,
                    ),
                )
            )
            return (
                Episode()
                .set_series_name(series_name)
                .set_season("na")
                .set_episode_name(episode_name)
                .set_episode_no(episode_no)
                .set_video_url(video_url)
                .set_referer_url(url)
                .set_image_src("na")
            ), x_servers
        except Exception as e:
            logger.error(f"{url}: {e}")
            # return an empty episode with no alternate servers so callers can
            # still unpack the (Episode, servers) tuple
            return Episode(), []
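The trickiest step in `parse_episode` is pulling the inline `var player_aaaa={...}` JSON out of the page HTML; a standalone sketch of that extraction against a made-up fragment (real agdm.tv markup will differ):

```python
import json

# made-up page fragment standing in for the real player script
html = (
    '<script type="text/javascript">'
    'var player_aaaa={"url":"https://cdn.example/ep1.m3u8",'
    '"vod_data":{"vod_name":"Some Show"}}</script>'
)

payload = (
    html.split("var player_aaaa=")[1]
    .split("</script>")[0]
    .strip()
    .encode()
    .decode("unicode_escape")  # same un-escaping step as the scrapper
)
player_aaaa = json.loads(payload)
print(player_aaaa["vod_data"]["vod_name"], player_aaaa["url"])
```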
8 changes: 4 additions & 4 deletions anime_dl/scrapper/yhdm_one_scrapper.py
@@ -53,11 +53,11 @@ def parse_series(self, url: str) -> typing.List[Episode]:
            series_name = doc.select_one("h1.names").text.strip()
            image_src = doc.select_one("div.detail-poster img").attrs["src"].strip()
            episode_no = 1
-           chapters = doc.select("div.ep-panel a")
+           items = doc.select("div.ep-panel a")
            progress_bar = ProgressBar("Link Fetching", 1, len(episodes))
-           for chapter in reversed(chapters):
-               episode_name = chapter.text.strip()
-               referer_url = "https://yhdm.one" + chapter.attrs["href"].strip()
+           for item in reversed(items):
+               episode_name = item.text.strip()
+               referer_url = "https://yhdm.one" + item.attrs["href"].strip()
                episodes.append(
                    Episode()
                    .set_series_name(series_name)

