
Commit

Added EpsilonScan (adult version) (#12)
Fixed tracking of series without an ID on asura.
MooshiMochi authored Mar 28, 2024
1 parent e02f06d commit 3ad71ef
Showing 5 changed files with 120 additions and 8 deletions.
8 changes: 8 additions & 0 deletions Changelog.md
@@ -2,6 +2,14 @@

#### Consider supporting me on [Patreon](https://patreon.com/mooshi69) or [Ko-Fi](https://ko-fi.com/mooshi69)!

## // March 27th 2024

- Added EpsilonScan.fr (adult version) to the bot.

### Bug Fixes:

- Fixed a bug where the bot would throw an error when tracking a series from asura that does not contain an ID.

## // March 26th 2024

- Made SettingsView's create_embed method public.
27 changes: 20 additions & 7 deletions src/core/scanlators/classes.py
@@ -69,9 +69,15 @@ def remove_unwanted_tags(soup: BeautifulSoup, unwanted_selectors: list[str]):
async def _get_status_tag(self: BasicScanlator, raw_url: str) -> bs4.Tag | None:
    if self.json_tree.properties.no_status:
        method = "POST" if self.json_tree.uses_ajax else "GET"
-       text = await self._get_text(await self.format_manga_url(raw_url, use_ajax_url=True), method=method)  # noqa
+       if not isinstance(self, DynamicURLScanlator):
+           request_url = await self.format_manga_url(raw_url, use_ajax_url=True)
+       else:
+           request_url = raw_url
+       text = await self._get_text(request_url, method=method)  # noqa
    else:
-       text = await self._get_text(await self.format_manga_url(raw_url))
+       text = await self._get_text(
+           await self.format_manga_url(raw_url) if not isinstance(self, DynamicURLScanlator) else raw_url)
    soup = BeautifulSoup(text, "html.parser")
    self.remove_unwanted_tags(soup, self.json_tree.selectors.unwanted_tags)
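
The no_status branch above picks POST or GET from json_tree.uses_ajax and fetches the resolved URL through self._get_text. As a rough sketch of what that kind of fetch can look like (illustrative only: aiohttp and the name fetch_text are assumptions, not this repository's actual HTTP layer):

# Illustrative sketch only; mirrors the method/URL pairing computed above
# but is not the bot's real _get_text implementation.
import aiohttp

async def fetch_text(url: str, method: str = "GET") -> str:
    async with aiohttp.ClientSession() as session:
        async with session.request(method, url) as response:
            response.raise_for_status()
            return await response.text()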

@@ -546,7 +552,9 @@ async def get_id(self, raw_url: str) -> str:
    return url_id

async def get_title(self, raw_url: str) -> str | None:
-   text = await self._get_text(await self.format_manga_url(raw_url))
+   if not isinstance(self, DynamicURLScanlator):
+       raw_url = await self.format_manga_url(raw_url)
+   text = await self._get_text(raw_url)
    soup = BeautifulSoup(text, "html.parser")
    self.remove_unwanted_tags(soup, self.json_tree.selectors.unwanted_tags)

@@ -560,9 +568,10 @@ async def get_title(self, raw_url: str) -> str | None:
    return title.get_text(strip=True)

async def get_all_chapters(self, raw_url: str) -> list[Chapter]:
-   req_url = await self.format_manga_url(raw_url, use_ajax_url=True)
+   if not isinstance(self, DynamicURLScanlator):
+       raw_url = await self.format_manga_url(raw_url, use_ajax_url=True)
    method = "POST" if self.json_tree.uses_ajax else "GET"
-   text = await self._get_text(req_url, method=method)  # noqa
+   text = await self._get_text(raw_url, method=method)  # noqa
    return self._extract_chapters_from_html(text)

def _extract_chapters_from_html(self, text: str) -> list[Chapter]:
@@ -593,7 +602,9 @@ async def get_status(self, raw_url: str) -> str:
    return re.sub(r"\W", "", status_text).lower().removeprefix("status").strip().title()

async def get_synopsis(self, raw_url: str) -> str:
-   text = await self._get_text(await self.format_manga_url(raw_url))
+   if not isinstance(self, DynamicURLScanlator):
+       raw_url = await self.format_manga_url(raw_url)
+   text = await self._get_text(raw_url)
    soup = BeautifulSoup(text, "html.parser")
    self.remove_unwanted_tags(soup, self.json_tree.selectors.unwanted_tags)

@@ -603,7 +614,9 @@ async def get_synopsis(self, raw_url: str) -> str:
    return synopsis.get_text(strip=True, separator="\n")

async def get_cover(self, raw_url: str) -> str:
-   text = await self._get_text(await self.format_manga_url(raw_url))
+   if not isinstance(self, DynamicURLScanlator):
+       raw_url = await self.format_manga_url(raw_url)
+   text = await self._get_text(raw_url)
    soup = BeautifulSoup(text, "html.parser")
    self.remove_unwanted_tags(soup, self.json_tree.selectors.unwanted_tags)
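
Each of the getters above now repeats the same check: when the scanlator is not a DynamicURLScanlator, raw_url is first passed through format_manga_url; otherwise the raw URL is used as-is. A minimal sketch of how that check could be folded into one place (the helper name _resolve_request_url is hypothetical, not part of this commit):

# Hypothetical helper, not part of this commit: centralises the repeated
# DynamicURLScanlator check used by _get_status_tag, get_title,
# get_all_chapters, get_synopsis and get_cover above.
async def _resolve_request_url(self, raw_url: str, use_ajax_url: bool = False) -> str:
    if isinstance(self, DynamicURLScanlator):
        # Dynamic-URL scanlators are fetched with the URL they were given.
        return raw_url
    # Everything else goes through the formatted manga/ajax URL.
    return await self.format_manga_url(raw_url, use_ajax_url=use_ajax_url)

Each call site would then reduce to something like text = await self._get_text(await self._resolve_request_url(raw_url)).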

72 changes: 72 additions & 0 deletions src/core/scanlators/lookup_map.json
@@ -2431,6 +2431,78 @@
        },
        "request_method": "GET"
      }
    },
    "epsilonscan": {
      "chapter_ajax": "html",
      "request_method": "http",
      "url_regex": "(?:https?://)?(?:www\\.)?epsilonscan\\.fr/manga/(?P<url_name>[\\w-]+)(?:/.*)?",
      "properties": {
        "base_url": "https://epsilonscan.fr",
        "icon_url": "https://epsilonscan.fr/wp-content/uploads/2024/02/logo_grand_2-1.png",
        "format_urls": {
          "manga": "https://epsilonscan.fr/manga/{url_name}/",
          "ajax": "https://epsilonscan.fr/manga/{url_name}/ajax/chapters"
        },
        "latest_updates_url": "https://epsilonscan.fr/",
        "dynamicURL": false,
        "requires_update_embed": true,
        "can_render_cover": true
      },
      "selectors": {
        "title": [
          "div.post-title > h1"
        ],
        "synopsis": "div.manga-excerpt",
        "cover": [
          "[property=og\\:image]",
          "div.summary_image > a > img"
        ],
        "chapters": {
          "container": "li.wp-manga-chapter",
          "name": "a",
          "url": "a"
        },
        "status": [
          "div.summary-heading:-soup-contains(\"Status\") + div"
        ],
        "front_page": {
          "container": "div.page-item-detail.manga",
          "chapters": {
            "container": "div.chapter-item > span.chapter",
            "name": "a",
            "url": "a"
          },
          "title": "div.post-title > h3.h5 > a",
          "url": "div.post-title > h3.h5 > a",
          "cover": "img"
        },
        "unwanted_tags": [
          "span.manga-title-badges"
        ],
        "search": {
          "container": "div.c-tabs-item__content",
          "title": "h3.h4 > a",
          "url": "h3.h4 > a",
          "cover": "img",
          "chapters": {
            "container": "span.chapter",
            "name": "a",
            "url": "a"
          }
        }
      },
      "search": {
        "url": "https://epsilonscan.fr/",
        "search_param_name": "s",
        "extra_params": {
          "post_type": "wp-manga"
        },
        "as_type": "param",
        "query_parsing": {
          "encoding": "url"
        },
        "request_method": "GET"
      }
    }
  },
  "custom": {
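
The selector block above follows the usual WP-Manga page layout, and the search block maps to a plain GET request of the form https://epsilonscan.fr/?s=<query>&post_type=wp-manga. As a hedged illustration of how such selectors are typically consumed (not code from this repository; the function name parse_epsilonscan_page is made up), the title, synopsis and chapter list could be pulled from a fetched manga page like this:

# Rough illustration only, using the "title", "synopsis" and "chapters"
# selectors from the epsilonscan entry above.
from bs4 import BeautifulSoup

def parse_epsilonscan_page(html: str) -> dict:
    soup = BeautifulSoup(html, "html.parser")
    title_tag = soup.select_one("div.post-title > h1")
    synopsis_tag = soup.select_one("div.manga-excerpt")
    chapters = []
    for item in soup.select("li.wp-manga-chapter"):
        link = item.select_one("a")
        if link is not None:
            chapters.append({"name": link.get_text(strip=True), "url": link.get("href")})
    return {
        "title": title_tag.get_text(strip=True) if title_tag else None,
        "synopsis": synopsis_tag.get_text(strip=True, separator="\n") if synopsis_tag else None,
        "chapters": chapters,
    }
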
2 changes: 1 addition & 1 deletion tests/test.py
@@ -570,7 +570,7 @@ async def test_single_scanlator(scanlator: str):
    asyncio.run(main())
else:
    # asyncio.run(test_single_method("show_front_page_results", "mangabat"))
-   asyncio.run(test_single_scanlator("nightscans"))
+   asyncio.run(test_single_scanlator("epsilonscan"))
    # asyncio.run(sub_main())
    # asyncio.run(paused_test())
    # asyncio.run(main())
19 changes: 19 additions & 0 deletions tests/test_map.json
@@ -918,5 +918,24 @@
      ],
      "has_fp_manhwa": true
    }
  },
  "epsilonscan": {
    "user_input_url": "https://epsilonscan.fr/manga/not-safe-for-work/",
    "expected_results": {
      "scanlator_name": "epsilonscan",
      "manga_url": "https://epsilonscan.fr/manga/not-safe-for-work/",
      "completed": true,
      "title": "Not Safe For Work",
      "manga_id": "default_id_function",
      "use_default_id_function": true,
      "curr_chapter_url": "https://epsilonscan.fr/manga/not-safe-for-work/chapitre-24/",
      "first_chapter_url": "https://epsilonscan.fr/manga/not-safe-for-work/chapitre-1/",
      "cover_image": "https://epsilonscan.fr/wp-content/uploads/2024/02/NSFW-cover-02_digital_art_x4_colored_toned_light_ai-1.webp",
      "last_3_chapter_urls": [
        "https://epsilonscan.fr/manga/not-safe-for-work/chapitre-22/",
        "https://epsilonscan.fr/manga/not-safe-for-work/chapitre-23/",
        "https://epsilonscan.fr/manga/not-safe-for-work/chapitre-24/"
      ]
    }
  }
}
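
This fixture pairs a user-supplied URL with the values the scanlator is expected to return. A hedged sketch of how such an entry might be checked against a scanlator instance (this harness is hypothetical; the real one lives in tests/test.py):

# Hypothetical check, not the repository's actual test harness.
import json

async def check_epsilonscan(scanlator) -> None:
    with open("tests/test_map.json", encoding="utf-8") as fp:
        entry = json.load(fp)["epsilonscan"]
    url = entry["user_input_url"]
    expected = entry["expected_results"]
    assert await scanlator.get_title(url) == expected["title"]
    assert await scanlator.get_cover(url) == expected["cover_image"]
    chapters = await scanlator.get_all_chapters(url)
    # Assumes Chapter objects expose a .url attribute.
    assert [chapter.url for chapter in chapters][-3:] == expected["last_3_chapter_urls"]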
