Skip to content

Commit db29f42

Browse files
committed
Rename Scraping to Scraper to be consistent across the platform
1 parent 3b001a3 commit db29f42

File tree

10 files changed

+44
-46
lines changed

10 files changed

+44
-46
lines changed

backend/program/__init__.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import threading
44
import time
55
import concurrent.futures
6-
from program.scrapers import Scraping
6+
from program.scrapers import Scraper
77
from program.realdebrid import Debrid
88
from program.symlink import Symlinker
99
from program.media.container import MediaItemContainer
@@ -37,7 +37,7 @@ def start(self):
3737
if not self.startup_args.dev:
3838
self.pickly = Pickly(self.media_items, self.data_path)
3939
self.pickly.start()
40-
self.core_manager = ServiceManager(self.media_items, True, Content, Plex, Scraping, Debrid, Symlinker)
40+
self.core_manager = ServiceManager(self.media_items, True, Content, Plex, Scraper, Debrid, Symlinker)
4141
if self.validate():
4242
logger.info("Iceberg started!")
4343
else:

backend/program/media/state.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ def perform_action(self, _):
2121

2222
class Content(MediaItemState):
2323
def perform_action(self, modules):
24-
scraper = next(module for module in modules if module.key == "scraping")
24+
scraper = next(module for module in modules if module.key == "scraper")
2525
if self.context.type in ["movie", "season", "episode"]:
2626
scraper.run(self.context)
2727
if self.context.state == Content and self.context.type == "season":

backend/program/scrapers/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,15 +9,15 @@
99
from program.scrapers.jackett import Jackett
1010

1111

12-
class Scraping:
12+
class Scraper:
1313
def __init__(self, _):
14-
self.key = "scraping"
14+
self.key = "scraper"
1515
self.initialized = False
1616
self.settings = settings_manager.settings.scraper
1717
self.sm = ServiceManager(None, False, Orionoid, Torrentio, Jackett)
1818
if not any(service.initialized for service in self.sm.services):
1919
logger.error(
20-
"You have no scraping services enabled, please enable at least one!"
20+
"You have no scraper services enabled, please enable at least one!"
2121
)
2222
return
2323
self.initialized = True

backend/program/settings/manager.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from pydantic import ValidationError
77

88
from utils import data_dir_path
9-
from program.settings.models import AppModel
9+
from program.settings.models import AppModel, NotifyingBaseModel
1010
from utils.logger import logger
1111
from utils.observable import Observable
1212

@@ -19,7 +19,7 @@ def __init__(self):
1919
self.filename = "settings.json"
2020
self.settings_file = data_dir_path / self.filename
2121

22-
AppModel.set_notify_observers(self.notify_observers)
22+
NotifyingBaseModel.set_notify_observers(self.notify_observers)
2323

2424
if not os.path.exists(self.settings_file):
2525
self.settings = AppModel()

backend/program/settings/models.py

+12-11
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
"""Iceberg settings models"""
22

33
from typing import Optional
4-
from pydantic import BaseModel, root_validator
4+
from pathlib import Path
55

6+
from pydantic import BaseModel, root_validator
67

78

89
class NotifyingBaseModel(BaseModel):
@@ -19,8 +20,8 @@ def set_notify_observers(cls, notify_observers_callable):
1920

2021
def __setattr__(self, name, value):
2122
super().__setattr__(name, value)
22-
if self.__class__.notify_observers:
23-
self.__class__.notify_observers()
23+
if self.__class__._notify_observers:
24+
self.__class__._notify_observers()
2425

2526
class PlexModel(NotifyingBaseModel):
2627
user: str = ""
@@ -31,8 +32,8 @@ class DebridModel(NotifyingBaseModel):
3132
api_key: str = ""
3233

3334
class SymlinkModel(NotifyingBaseModel):
34-
host_path: str = ""
35-
container_path: str = ""
35+
host_path: Path = Path()
36+
container_path: Path = Path()
3637

3738
# Content Services
3839
class ContentNotifyingBaseModel(NotifyingBaseModel):
@@ -79,17 +80,17 @@ class TorrentioConfig(NotifyingBaseModel):
7980
filter: str = "sort=qualitysize%7Cqualityfilter=480p,scr,cam,unknown"
8081

8182
class ScraperModel(NotifyingBaseModel):
82-
after_2: int = 0.5,
83-
after_5: int = 2,
84-
after_10: int = 24,
83+
after_2: float = 0.5
84+
after_5: int = 2
85+
after_10: int = 24
8586
jackett: JackettConfig = JackettConfig()
8687
orionoid: OrionoidConfig = OrionoidConfig()
8788
torrentio: TorrentioConfig = TorrentioConfig()
8889

8990
class ParserModel(NotifyingBaseModel):
90-
highest_quality: bool = False,
91-
include_4k: bool = False,
92-
repack_proper: bool = True,
91+
highest_quality: bool = False
92+
include_4k: bool = False
93+
repack_proper: bool = True
9394
language: list[str] = ["English"]
9495

9596

backend/program/symlink.py

+4-7
Original file line numberDiff line numberDiff line change
@@ -26,14 +26,14 @@ def __init__(self, _):
2626
logger.error("Symlink initialization failed due to invalid configuration.")
2727
return
2828
logger.info("Rclone path symlinks are pointed to: %s", self.settings.host_path)
29-
logger.info("Symlinks will be placed in: %s", self.library_path)
29+
logger.info("Symlinks will be placed in: %s", self.settings.container_path)
3030
logger.info("Symlink initialized!")
3131
self.initialized = True
3232

3333
def validate(self):
3434
"""Validate paths and create the initial folders."""
35-
host_path = Path(self.settings.host_path) if self.settings.host_path else None
36-
container_path = Path(self.settings.container_path) if self.settings.container_path else None
35+
host_path = self.settings.host_path
36+
container_path = self.settings.container_path
3737
if not host_path or not container_path or host_path == Path('.') or container_path == Path('.'):
3838
logger.error("Host or container path not provided, is empty, or is set to the current directory.")
3939
return False
@@ -47,9 +47,6 @@ def validate(self):
4747
if not host_path.is_dir():
4848
logger.error(f"Host path is not a directory or does not exist: {host_path}")
4949
return False
50-
# if not container_path.is_dir():
51-
# logger.error(f"Container path is not a directory or does not exist: {container_path}")
52-
# return False
5350
if Path(self.settings.host_path / "__all__").exists() and Path(self.settings.host_path / "__all__").is_dir():
5451
logger.debug("Detected Zurg host path. Using __all__ folder for host path.")
5552
self.settings.host_path = self.settings.host_path / "__all__"
@@ -71,7 +68,7 @@ def validate(self):
7168
def create_initial_folders(self):
7269
"""Create the initial library folders."""
7370
try:
74-
self.library_path = self.settings.host_path.parent / "library"
71+
self.library_path = self.settings.container_path / "library"
7572
self.library_path_movies = self.library_path / "movies"
7673
self.library_path_shows = self.library_path / "shows"
7774
self.library_path_anime_movies = self.library_path / "anime_movies"

backend/utils/default_settings.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@
3838
"api_key": ""
3939
}
4040
},
41-
"scraping": {
41+
"scraper": {
4242
"after_2": 0.5,
4343
"after_5": 2,
4444
"after_10": 24,

frontend/src/hooks.server.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ const onboarding: Handle = async ({ event, resolve }) => {
99
if (!data.success || !data.data) {
1010
error(500, 'API Error');
1111
}
12-
const toCheck = ['content', 'scraping', 'plex', 'real_debrid', 'symlink'];
12+
const toCheck = ['content', 'scraper', 'plex', 'real_debrid', 'symlink'];
1313
const allServicesTrue: boolean = toCheck.every((service) => data.data[service] === true);
1414
if (!allServicesTrue) {
1515
redirect(302, '/onboarding');

frontend/src/lib/forms/helpers.ts

+14-14
Original file line numberDiff line numberDiff line change
@@ -216,8 +216,8 @@ export function mediaServerSettingsToSet(form: SuperValidated<MediaServerSetting
216216
}
217217

218218
// Scrapers Settings -----------------------------------------------------------------------------------
219-
export const scrapersSettingsToGet: string[] = ['scraping'];
220-
export const scrapersSettingsServices: string[] = ['scraping'];
219+
export const scrapersSettingsToGet: string[] = ['scraper'];
220+
export const scrapersSettingsServices: string[] = ['scraper'];
221221

222222
export const scrapersSettingsSchema = z.object({
223223
after_2: z.number().nonnegative().default(0.5),
@@ -239,24 +239,24 @@ export type ScrapersSettingsSchema = typeof scrapersSettingsSchema;
239239

240240
export function scrapersSettingsToPass(data: any) {
241241
return {
242-
after_2: data.data.scraping.after_2,
243-
after_5: data.data.scraping.after_5,
244-
after_10: data.data.scraping.after_10,
245-
torrentio_url: data.data.scraping.torrentio?.url || 'https://torrentio.strem.fun',
246-
torrentio_enabled: data.data.scraping.torrentio.enabled,
247-
orionoid_enabled: data.data.scraping.orionoid.enabled,
248-
jackett_enabled: data.data.scraping.jackett.enabled,
249-
torrentio_filter: data.data.scraping.torrentio?.filter || '',
250-
orionoid_api_key: data.data.scraping.orionoid?.api_key || '',
251-
jackett_url: data.data.scraping.jackett?.url || '',
252-
jackett_api_key: data.data.scraping.jackett?.api_key || ''
242+
after_2: data.data.scraper.after_2,
243+
after_5: data.data.scraper.after_5,
244+
after_10: data.data.scraper.after_10,
245+
torrentio_url: data.data.scraper.torrentio?.url || 'https://torrentio.strem.fun',
246+
torrentio_enabled: data.data.scraper.torrentio.enabled,
247+
orionoid_enabled: data.data.scraper.orionoid.enabled,
248+
jackett_enabled: data.data.scraper.jackett.enabled,
249+
torrentio_filter: data.data.scraper.torrentio?.filter || '',
250+
orionoid_api_key: data.data.scraper.orionoid?.api_key || '',
251+
jackett_url: data.data.scraper.jackett?.url || '',
252+
jackett_api_key: data.data.scraper.jackett?.api_key || ''
253253
};
254254
}
255255

256256
export function scrapersSettingsToSet(form: SuperValidated<ScrapersSettingsSchema>) {
257257
return [
258258
{
259-
key: 'scraping',
259+
key: 'scraper',
260260
value: {
261261
after_2: form.data.after_2,
262262
after_5: form.data.after_5,

frontend/src/routes/+page.svelte

+4-4
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,9 @@
77
88
export let data: PageData;
99
10-
const MandatoryServices = ['plex', 'content', 'scraping', 'real_debrid', 'symlink'];
10+
const MandatoryServices = ['plex', 'content', 'scraper', 'real_debrid', 'symlink'];
1111
const ContentServices = ['mdblist', 'overseerr', 'plex_watchlist'];
12-
const ScrapingServices = ['torrentio', 'jackett', 'orionoid'];
12+
const ScraperServices = ['torrentio', 'jackett', 'orionoid'];
1313
1414
function sortServices(services: string[], data: Record<string, boolean>) {
1515
let sortedData = {} as Record<string, boolean>;
@@ -53,8 +53,8 @@
5353
<h2 class="text-lg md:text-xl font-semibold">Content services</h2>
5454
<ServiceStatus statusData={sortServices(ContentServices, services.data)} />
5555
<br />
56-
<h2 class="text-lg md:text-xl font-semibold">Scraping services</h2>
57-
<ServiceStatus statusData={sortServices(ScrapingServices, services.data)} />
56+
<h2 class="text-lg md:text-xl font-semibold">Scraper services</h2>
57+
<ServiceStatus statusData={sortServices(ScraperServices, services.data)} />
5858
{:catch}
5959
<p class="text-muted-foreground">Failed to fetch services status</p>
6060
{/await}

0 commit comments

Comments
 (0)