Skip to content

Commit

Permalink
Merge pull request #2198 from CryZFix/dev
Browse files Browse the repository at this point in the history
Fix Source
  • Loading branch information
dipu-bd authored Nov 14, 2023
2 parents 4c9ae33 + 3a4383b commit 8ee2aaf
Showing 1 changed file with 14 additions and 2 deletions.
16 changes: 14 additions & 2 deletions sources/en/s/shanghaifantasy.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import logging
import re

from lncrawl.core.crawler import Crawler

Expand All @@ -15,7 +16,10 @@ def read_novel_info(self):
soup = self.get_soup(self.novel_url)

novel_id = soup.select_one("div#likebox").attrs["data-novel"]
total_chapters = soup.select("div.grid p.text-sm")[1].text.split(": ")[1]
total_chapters_text = soup.select("div.grid p.text-sm")[1].text.split(": ")[1]
total_chapters = sum(int(num) for num in re.findall(r"\b\d+\b", total_chapters_text))
if total_chapters == 0:
total_chapters = 999
get_novel_json = self.get_response(self.wp_json_novel % novel_id).json()

novel_title = get_novel_json["title"]["rendered"]
Expand Down Expand Up @@ -48,5 +52,13 @@ def read_novel_info(self):

def download_chapter_body(self, chapter):
    """Return the cleaned HTML body of a chapter.

    The chapter page itself is only used to discover the WordPress post id;
    the actual content is then fetched from the site's WP REST API
    (``/wp-json/wp/v2/posts/<id>``) and passed through the cleaner.

    :param chapter: chapter dict containing at least the ``"url"`` key.
    :return: cleaned chapter contents extracted by ``self.cleaner``.
    """
    soup = self.get_soup(chapter["url"])
    # The post id is embedded in the comment widgets, not in the URL.
    # Prefer the reply link's data attribute; fall back to the hidden
    # comment-form input when no reply link is present.
    possible_chap_id = soup.select_one("a.comment-reply-link")
    if possible_chap_id:
        chap_id = possible_chap_id.attrs["data-postid"]
    else:
        possible_chap_id = soup.select_one("input#comment_post_ID")
        chap_id = possible_chap_id.attrs["value"]
    data = self.get_json(
        "https://shanghaifantasy.com/wp-json/wp/v2/posts/%s" % chap_id
    )["content"]["rendered"]
    # Newlines are flattened before re-parsing — presumably to keep the
    # cleaner from splitting text on them; TODO confirm this is still needed.
    soup = self.make_soup(data.replace("\n", " "))
    content = soup.find("body")
    return self.cleaner.extract_contents(content)

0 comments on commit 8ee2aaf

Please sign in to comment.