Skip to content

Commit 7858108

Browse files
dreulavelle
and
Spoked
authored
Fix/parser/add attribute (#179)
* increase ratelimits on second_limiters * Iceberg works. All scrapers working together. Symlinking works. --------- Co-authored-by: Spoked <Spoked@localhost>
1 parent b667931 commit 7858108

File tree

13 files changed

+161
-91
lines changed

13 files changed

+161
-91
lines changed

backend/program/__init__.py

+10-6
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,13 @@ def validate(self):
5858
return all(service.initialized for service in self.core_manager.services)
5959

6060
def stop(self):
61-
for service in self.core_manager.services:
62-
if getattr(service, "running", False):
63-
service.stop()
64-
self.pickly.stop()
65-
settings.save()
66-
self.running = False
61+
try:
62+
for service in self.core_manager.services:
63+
if getattr(service, "running", False):
64+
service.stop()
65+
self.pickly.stop()
66+
settings.save()
67+
self.running = False
68+
except Exception as e:
69+
logger.error("Iceberg stopping with exception: %s", e)
70+
pass

backend/program/content/plex_watchlist.py

+21-5
Original file line numberDiff line numberDiff line change
@@ -29,26 +29,32 @@ def __init__(self, media_items: MediaItemContainer):
2929
self.media_items = media_items
3030
self.prev_count = 0
3131
self.updater = Trakt()
32+
self.not_found_ids = []
3233

3334
def validate_settings(self):
3435
if not self.settings.enabled:
3536
logger.debug("Plex Watchlists is set to disabled.")
3637
return False
3738
if self.settings.rss:
39+
logger.info("Found Plex RSS URL. Validating...")
3840
try:
3941
response = ping(self.settings.rss)
4042
if response.ok:
4143
self.rss_enabled = True
44+
logger.info("Plex RSS URL is valid.")
4245
return True
4346
else:
44-
logger.warn(f"Plex RSS URL is not reachable. Falling back to normal Watchlist.")
47+
logger.info(f"Plex RSS URL is not valid. Falling back to watching user Watchlist.")
4548
return True
4649
except HTTPError as e:
4750
if e.response.status_code in [404]:
48-
logger.error("Plex RSS URL is invalid. Falling back to normal Watchlist.")
51+
logger.warn("Plex RSS URL is Not Found. Falling back to watching user Watchlist.")
4952
return True
50-
if e.response.status_code >= 400 and e.response.status_code <= 500:
51-
logger.warn(f"Plex RSS URL is not reachable. Falling back to normal Watchlist.")
53+
if e.response.status_code >= 400 and e.response.status_code <= 499:
54+
logger.warn(f"Plex RSS URL is not reachable. Falling back to watching user Watchlist.")
55+
return True
56+
if e.response.status_code >= 500:
57+
logger.error(f"Plex is having issues validating RSS feed. Falling back to watching user Watchlist.")
5258
return True
5359
except Exception as e:
5460
logger.exception("Failed to validate Plex RSS URL: %s", e)
@@ -60,6 +66,13 @@ def run(self):
6066
if they are not already there"""
6167
items = self._create_unique_list()
6268
new_items = [item for item in items if item not in self.media_items] or []
69+
if len(new_items) == 0:
70+
logger.debug("No new items found in Plex Watchlist")
71+
return
72+
for check in new_items:
73+
if check is None:
74+
new_items.remove(check)
75+
self.not_found_ids.append(check)
6376
container = self.updater.create_items(new_items)
6477
for item in container:
6578
item.set("requested_by", "Plex Watchlist")
@@ -76,6 +89,9 @@ def run(self):
7689
logger.info("Added %s", item.log_string)
7790
elif length > 5:
7891
logger.info("Added %s items", length)
92+
if len(self.not_found_ids) >= 1 and len(self.not_found_ids) <= 5:
93+
for item in self.not_found_ids:
94+
logger.info("Failed to add %s", item)
7995

8096
def _create_unique_list(self):
8197
"""Create a unique list of items from Plex RSS and Watchlist"""
@@ -88,7 +104,7 @@ def _create_unique_list(self):
88104
def _get_items_from_rss(self) -> list:
89105
"""Fetch media from Plex RSS Feed"""
90106
try:
91-
response_obj = get(self.settings.rss, timeout=30)
107+
response_obj = get(self.settings.rss, timeout=60)
92108
data = json.loads(response_obj.response.content)
93109
items = data.get("items", [])
94110
ids = [

backend/program/media/container.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import threading
33
import dill
44
from typing import List, Optional
5+
from utils.logger import logger
56
from program.media.item import MediaItem
67

78

@@ -24,7 +25,12 @@ def __iadd__(self, other):
2425
return self
2526

2627
def sort(self, by, reverse):
27-
self.items.sort(key=lambda item: item.get(by), reverse=reverse)
28+
"""Sort container by given attribute"""
29+
try:
30+
self.items.sort(key=lambda item: item.get(by), reverse=reverse)
31+
except AttributeError as e:
32+
logger.error("Failed to sort container: %s", e)
33+
pass
2834

2935
def __len__(self):
3036
"""Get length of container"""

backend/program/media/item.py

+1-14
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
from datetime import datetime
21
import threading
2+
from datetime import datetime
33
from program.media.state import (
44
Unknown,
55
Content,
@@ -112,19 +112,6 @@ def to_extended_dict(self):
112112
dict["language"] = (self.language if hasattr(self, "language") else None,)
113113
dict["country"] = (self.country if hasattr(self, "country") else None,)
114114
dict["network"] = (self.network if hasattr(self, "network") else None,)
115-
dict["streams"] = (self.streams if hasattr(self, "streams") else None,)
116-
dict["active_stream"] = (
117-
self.active_stream if hasattr(self, "active_stream") else None
118-
,)
119-
dict["symlinked"] = (self.symlinked if hasattr(self, "symlinked") else None,)
120-
dict["parsed"] = (self.parsed if hasattr(self, "parsed") else None,)
121-
dict["parsed_data"] = (self.parsed_data if hasattr(self, "parsed_data") else None,)
122-
dict["is_anime"] = (self.is_anime if hasattr(self, "is_anime") else None,)
123-
dict["update_folder"] = (
124-
self.update_folder if hasattr(self, "update_folder") else None
125-
,)
126-
dict["file"] = (self.file if hasattr(self, "file") else None,)
127-
dict["folder"] = (self.folder if hasattr(self, "folder") else None,)
128115
return dict
129116

130117
def __iter__(self):

backend/program/plex.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
from datetime import datetime
88
from typing import Optional
99
from plexapi.server import PlexServer
10-
from plexapi.exceptions import BadRequest
10+
from plexapi.exceptions import BadRequest, Unauthorized
1111
from pydantic import BaseModel
1212
# from program.updaters.trakt import get_imdbid_from_tvdb
1313
from utils.logger import logger
@@ -46,11 +46,14 @@ def __init__(self, media_items: MediaItemContainer):
4646
self.plex = PlexServer(
4747
self.settings.url, self.settings.token, timeout=60
4848
)
49+
except Unauthorized:
50+
logger.warn("Plex is not authorized!")
51+
return
4952
except BadRequest as e:
5053
logger.error("Plex is not configured correctly: %s", e)
5154
return
52-
except Exception:
53-
logger.error("Plex is not configured!")
55+
except Exception as e:
56+
logger.error("Plex exception thrown: %s", e)
5457
return
5558
self.running = False
5659
self.log_worker_count = False

backend/program/realdebrid.py

+3
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,9 @@ def chunks(lst, n):
130130
"active_stream",
131131
{"hash": stream_hash, "files": wanted_files, "id": None},
132132
)
133+
all_filenames = [file_info["filename"] for file_info in wanted_files.values()]
134+
for file in all_filenames:
135+
logger.debug(f"Found cached file {file} for {item.log_string}")
133136
return True
134137
item.streams[stream_hash] = None
135138
return False

backend/program/scrapers/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ def __init__(self, _):
1919
self.key = "scraping"
2020
self.initialized = False
2121
self.settings = ScrapingConfig(**settings.get(self.key))
22-
self.sm = ServiceManager(None, False, Torrentio, Orionoid, Jackett)
22+
self.sm = ServiceManager(None, False, Orionoid, Torrentio, Jackett)
2323
if not any(service.initialized for service in self.sm.services):
2424
logger.error(
2525
"You have no scraping services enabled, please enable at least one!"

backend/program/scrapers/jackett.py

+19-12
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
""" Jackett scraper module """
2+
import traceback
23
from typing import Optional
34
from pydantic import BaseModel
45
from requests import ReadTimeout, RequestException
56
from utils.logger import logger
67
from utils.settings import settings_manager
78
from utils.parser import parser
8-
from utils.request import RateLimitExceeded, get, RateLimiter
9+
from utils.request import RateLimitExceeded, get, RateLimiter, ping
910

1011

1112
class JackettConfig(BaseModel):
@@ -24,9 +25,9 @@ def __init__(self, _):
2425
self.initialized = self.validate_settings()
2526
if not self.initialized and not self.api_key:
2627
return
27-
self.parse_logging = False
2828
self.minute_limiter = RateLimiter(max_calls=60, period=60, raise_on_limit=True)
2929
self.second_limiter = RateLimiter(max_calls=1, period=10)
30+
self.parse_logging = False
3031
logger.info("Jackett initialized!")
3132

3233
def validate_settings(self) -> bool:
@@ -38,8 +39,8 @@ def validate_settings(self) -> bool:
3839
self.api_key = self.settings.api_key
3940
try:
4041
url = f"{self.settings.url}/api/v2.0/indexers/!status:failing,test:passed/results/torznab?apikey={self.api_key}&cat=2000&t=movie&q=test"
41-
response = get(url=url, retry_if_failed=False, timeout=60)
42-
if response.is_ok:
42+
response = ping(url=url, timeout=60)
43+
if response.ok:
4344
return True
4445
except ReadTimeout:
4546
return True
@@ -65,18 +66,22 @@ def validate_settings(self) -> bool:
6566

6667
def run(self, item):
6768
"""Scrape Jackett for the given media items"""
69+
if item is None or not self.initialized:
70+
return
6871
try:
6972
self._scrape_item(item)
70-
except RequestException:
73+
except RateLimitExceeded as e:
7174
self.minute_limiter.limit_hit()
72-
logger.debug("Jackett connection timeout for item: %s", item.log_string)
75+
logger.warn("Jackett rate limit hit for item: %s", item.log_string)
7376
return
74-
except RateLimitExceeded:
77+
except RequestException as e:
7578
self.minute_limiter.limit_hit()
76-
logger.debug("Jackett rate limit hit for item: %s", item.log_string)
79+
logger.exception("Jackett request exception: %s", e, exc_info=True)
7780
return
7881
except Exception as e:
79-
logger.exception("Jackett exception for item: %s - Exception: %s", item.log_string, e)
82+
self.minute_limiter.limit_hit()
83+
# logger.debug("Jackett exception for item: %s - Exception: %s", item.log_string, e.args[0], exc_info=True)
84+
# logger.debug("Exception details: %s", traceback.format_exc())
8085
return
8186

8287
def _scrape_item(self, item):
@@ -105,8 +110,11 @@ def api_scrape(self, item):
105110
if response.is_ok:
106111
data = {}
107112
streams = response.data["rss"]["channel"].get("item", [])
108-
parsed_data_list = [parser.parse(item, stream.get("title")) for stream in streams]
113+
parsed_data_list = [parser.parse(item, stream.get("title")) for stream in streams if type(stream) != str]
109114
for stream, parsed_data in zip(streams, parsed_data_list):
115+
if type(stream) == str:
116+
logger.debug("Found another string: %s", stream)
117+
continue
110118
if parsed_data.get("fetch", True) and parsed_data.get("title_match", False):
111119
attr = stream.get("torznab:attr", [])
112120
infohash_attr = next((a for a in attr if a.get("@name") == "infohash"), None)
@@ -118,6 +126,5 @@ def api_scrape(self, item):
118126
logger.debug("Jackett Fetch: %s - Parsed item: %s", parsed_data["fetch"], parsed_data["string"])
119127
if data:
120128
item.parsed_data.extend(parsed_data_list)
121-
item.parsed_data.append({self.key: True})
122129
return data, len(streams)
123-
return {}, len(streams) or 0
130+
return {}, 0

backend/program/scrapers/orionoid.py

+34-10
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,10 @@ def __init__(self, _):
2929
self.initialized = True
3030
else:
3131
return
32+
self.orionoid_limit = 0
33+
self.orionoid_remaining = 0
3234
self.parse_logging = False
33-
self.max_calls = 50 if not self.is_premium else 60
35+
self.max_calls = 100 if not self.is_premium else 60
3436
self.period = 86400 if not self.is_premium else 60
3537
self.minute_limiter = RateLimiter(max_calls=self.max_calls, period=self.period, raise_on_limit=True)
3638
self.second_limiter = RateLimiter(max_calls=1, period=10)
@@ -43,8 +45,20 @@ def validate_settings(self) -> bool:
4345
return False
4446
if self.settings.api_key:
4547
return True
46-
logger.info("Orionoid is not configured and will not be used.")
47-
return False
48+
try:
49+
url = f"https://api.orionoid.com?keyapp={KEY_APP}&keyuser={self.settings.api_key}&mode=user&action=retrieve"
50+
response = get(url, retry_if_failed=False)
51+
if response.is_ok:
52+
return True
53+
if not response.data.result.status == "success":
54+
logger.error(f"Orionoid API Key is invalid. Status: {response.data.result.status}")
55+
return False
56+
if not response.is_ok:
57+
logger.error(f"Orionoid Status Code: {response.status_code}, Reason: {response.reason}")
58+
return False
59+
except Exception as e:
60+
logger.exception("Orionoid failed to initialize: %s", e)
61+
return False
4862

4963
def check_premium(self) -> bool:
5064
"""
@@ -73,15 +87,19 @@ def run(self, item):
7387
self._scrape_item(item)
7488
except ConnectTimeout:
7589
self.minute_limiter.limit_hit()
76-
logger.debug("Orionoid connection timeout for item: %s", item.log_string)
90+
logger.warn("Orionoid connection timeout for item: %s", item.log_string)
7791
return
78-
except RequestException:
92+
except RequestException as e:
7993
self.minute_limiter.limit_hit()
80-
logger.debug("Orionoid request exception for item: %s", item.log_string)
94+
logger.exception("Orionoid request exception: %s", e)
8195
return
8296
except RateLimitExceeded:
8397
self.minute_limiter.limit_hit()
84-
logger.debug("Orionoid rate limit hit for item: %s", item.log_string)
98+
logger.warn("Orionoid rate limit hit for item: %s", item.log_string)
99+
return
100+
except Exception as e:
101+
self.minute_limiter.limit_hit()
102+
logger.exception("Orionoid exception for item: %s - Exception: %s", item.log_string, e)
85103
return
86104

87105
def _scrape_item(self, item):
@@ -133,7 +151,14 @@ def api_scrape(self, item):
133151

134152
with self.second_limiter:
135153
response = get(url, retry_if_failed=False, timeout=60)
136-
if response.is_ok and len(response.data.data.streams) > 0:
154+
if response.is_ok and hasattr(response.data, "data"):
155+
156+
# Check and log Orionoid API limits
157+
# self.orionoid_limit = response.data.data.requests.daily.limit
158+
# self.orionoid_remaining = response.data.data.requests.daily.remaining
159+
# if self.orionoid_remaining < 10:
160+
# logger.warning(f"Orionoid API limit is low. Limit: {self.orionoid_limit}, Remaining: {self.orionoid_remaining}")
161+
137162
parsed_data_list = [
138163
parser.parse(item, stream.file.name)
139164
for stream in response.data.data.streams
@@ -149,6 +174,5 @@ def api_scrape(self, item):
149174
logger.debug("Orionoid Fetch: %s - Parsed item: %s", parsed_data["fetch"], parsed_data["string"])
150175
if data:
151176
item.parsed_data.extend(parsed_data_list)
152-
item.parsed_data.append({self.key: True})
153177
return data, len(response.data.data.streams)
154-
return {}, len(response.data.data.streams) or 0
178+
return {}, 0

0 commit comments

Comments (0)