37 | 37 | from minet.scrape.utils import BeautifulSoupWithoutXHTMLWarnings
38 | 38 | from minet.scrape.std import get_display_text
39 | 39 | from minet.facebook.utils import grab_facebook_cookie
40 | | -from minet.facebook.formatters import (
41 | | - FacebookComment,
42 | | - FacebookPost,
43 | | - FacebookUser,
44 | | - FacebookPostWithReaction,
| 40 | +from minet.facebook.types import (
| 41 | + MobileFacebookComment,
| 42 | + MobileFacebookUser,
| 43 | + MobileFacebookPost,
| 44 | + MobileFacebookPostWithReactions,
45 | 45 | )
46 | 46 | from minet.facebook.exceptions import (
47 | 47 | FacebookInvalidCookieError,

@@ -80,7 +80,7 @@ def resolve_relative_url(url):
80 | 80 | return urljoin(FACEBOOK_MOBILE_URL, url)
81 | 81 |
82 | 82 |
83 | | -def scrape_comments(html, direction=None, in_reply_to=None):
| 83 | +def scrape_comments(html, direction=None, in_reply_to=None) -> MobileFacebookComment:
84 | 84 | soup = BeautifulSoupWithoutXHTMLWarnings(html, "lxml")
85 | 85 |
86 | 86 | data = {

@@ -139,7 +139,7 @@ def scrape_comments(html, direction=None, in_reply_to=None):
139 | 139 | )
140 | 140 |
141 | 141 | for item in valid_items:
142 | | - item_id = item.get("id")
| 142 | + item_id = item["id"]
143 | 143 |
144 | 144 | # Skipping comment if same as commented
145 | 145 | if item_id == in_reply_to:

@@ -206,7 +206,7 @@ def scrape_comments(html, direction=None, in_reply_to=None):
206 | 206 | data["replies"].append((resolve_relative_url(replies_url), item_id))
207 | 207 |
208 | 208 | data["comments"].append(
209 | | - FacebookComment(
| 209 | + MobileFacebookComment(
210 | 210 | post_id=post_id,
211 | 211 | id=item_id,
212 | 212 | user_id=getattr(user, "id", ""),

@@ -302,7 +302,7 @@ def scrape_posts(html):
302 | 302 | else None
303 | 303 | )
304 | 304 |
305 | | - post = FacebookPost(
| 305 | + post = MobileFacebookPost(
306 | 306 | url=post_url,
307 | 307 | user_id=getattr(user, "id", ""),
308 | 308 | user_handle=getattr(user, "handle", ""),

@@ -401,7 +401,7 @@ def scrape_video(soup):
401 | 401 | else None
402 | 402 | )
403 | 403 |
404 | | - post = FacebookPostWithReaction(
| 404 | + post = MobileFacebookPostWithReactions(
405 | 405 | url=video_url,
406 | 406 | user_id=getattr(user, "id", ""),
407 | 407 | user_handle=getattr(user, "handle", ""),

@@ -492,7 +492,7 @@ def scrape_photo(soup):
492 | 492 | else None
493 | 493 | )
494 | 494 |
495 | | - post = FacebookPostWithReaction(
| 495 | + post = MobileFacebookPostWithReactions(
496 | 496 | url=photo_url,
497 | 497 | user_id=getattr(user, "id", ""),
498 | 498 | user_handle=getattr(user, "handle", ""),

@@ -597,7 +597,7 @@ def scrape_post(html):
597 | 597 | else None
598 | 598 | )
599 | 599 |
600 | | - post = FacebookPostWithReaction(
| 600 | + post = MobileFacebookPostWithReactions(
601 | 601 | url=post_url,
602 | 602 | user_id=getattr(user, "id", ""),
603 | 603 | user_handle=getattr(user, "handle", ""),

@@ -765,8 +765,8 @@ def post_author(self, url):
765 | 765 | user_label = user_item.get_text().strip()
766 | 766 |
767 | 767 | if isinstance(parsed, ParsedFacebookHandle):
768 | | - return FacebookUser(user_label, None, parsed.handle, parsed.url)
| 768 | + return MobileFacebookUser(user_label, None, parsed.handle, parsed.url)
769 | 769 | elif isinstance(parsed, ParsedFacebookUser):
770 | | - return FacebookUser(user_label, parsed.id, parsed.handle, parsed.url)
| 770 | + return MobileFacebookUser(user_label, parsed.id, parsed.handle, parsed.url)
771 | 771 | else:
772 | 772 | raise TypeError
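
For downstream callers, the change amounts to swapping the import path and class names. A minimal sketch, assuming only the renamed exports visible in the hunks above:

```python
# Sketch of the rename, based solely on the diff above: the record classes
# move from minet.facebook.formatters to minet.facebook.types.
from minet.facebook.types import (
    MobileFacebookComment,            # was FacebookComment
    MobileFacebookUser,               # was FacebookUser
    MobileFacebookPost,               # was FacebookPost
    MobileFacebookPostWithReactions,  # was FacebookPostWithReaction
)
```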