From b4298877e2a80756085d08626b02b6a465d8a8d4 Mon Sep 17 00:00:00 2001 From: CasVT Date: Thu, 23 Nov 2023 17:45:23 +0100 Subject: [PATCH] Added support for downloading torrents from GC (#48) --- backend/custom_exceptions.py | 113 ++++- backend/db.py | 82 ++- backend/download.py | 292 ----------- ..._clients.py => download_direct_clients.py} | 207 ++++---- backend/download_general.py | 308 ++++++++++++ backend/download_queue.py | 470 +++++++++++++++++ backend/download_torrent_clients.py | 294 +++++++++++ backend/getcomics.py | 164 +++--- backend/post_processing.py | 472 ++++++++++-------- backend/settings.py | 29 +- backend/tasks.py | 32 +- backend/torrent_clients/qBittorrent.py | 124 +++++ frontend/api.py | 120 ++++- frontend/static/css/queue.css | 2 +- frontend/static/css/settings.css | 147 +++++- frontend/static/css/window.css | 3 +- .../static/js/settings_download_clients.js | 317 ++++++++++++ frontend/templates/settings_download.html | 11 +- .../templates/settings_download_clients.html | 163 ++++++ frontend/templates/settings_general.html | 1 + .../templates/settings_mediamanagement.html | 1 + frontend/ui.py | 4 + 22 files changed, 2632 insertions(+), 724 deletions(-) delete mode 100644 backend/download.py rename backend/{download_clients.py => download_direct_clients.py} (59%) create mode 100644 backend/download_general.py create mode 100644 backend/download_queue.py create mode 100644 backend/download_torrent_clients.py create mode 100644 backend/torrent_clients/qBittorrent.py create mode 100644 frontend/static/js/settings_download_clients.js create mode 100644 frontend/templates/settings_download_clients.html diff --git a/backend/custom_exceptions.py b/backend/custom_exceptions.py index 1b8e809c..2fabf0d1 100644 --- a/backend/custom_exceptions.py +++ b/backend/custom_exceptions.py @@ -22,7 +22,8 @@ def __init__(self) -> None: return class RootFolderInUse(Exception): - """A root folder with the given id is requested to be deleted but is used by a volume + 
"""A root folder with the given id is requested to be deleted + but is used by a volume """ api_response = {'error': 'RootFolderInUse', 'result': {}, 'code': 400} @@ -40,7 +41,8 @@ def __init__(self) -> None: return class VolumeNotMatched(Exception): - """The volume with the given id was found in the database but the comicvine id returned nothing + """The volume with the given id was found in the database + but the comicvine id returned nothing """ api_response = {'error': 'VolumeNotMatched', 'result': {}, 'code': 400} @@ -77,7 +79,11 @@ def __init__(self, volume_id: int): @property def api_response(self): - return {'error': 'VoolumeDownloadedFor', 'result': {'volume_id': self.volume_id}, 'code': 400} + return { + 'error': 'VolumeDownloadedFor', + 'result': {'volume_id': self.volume_id}, + 'code': 400 + } class IssueNotFound(Exception): """The issue with the given id was not found @@ -144,7 +150,14 @@ def __init__(self, reason_id: int, reason_text: str): @property def api_response(self): - return {'error': 'LinkBroken', 'result': {'reason_text': self.reason_text, 'reason_id': self.reason_id}, 'code': 400} + return { + 'error': 'LinkBroken', + 'result': { + 'reason_text': self.reason_text, + 'reason_id': self.reason_id + }, + 'code': 400 + } class InvalidSettingKey(Exception): """The setting key is unknown @@ -157,7 +170,11 @@ def __init__(self, key: str=''): @property def api_response(self): - return {'error': 'InvalidSettingKey', 'result': {'key': self.key}, 'code': 400} + return { + 'error': 'InvalidSettingKey', + 'result': {'key': self.key}, + 'code': 400 + } class InvalidSettingValue(Exception): """The setting value is invalid @@ -171,7 +188,11 @@ def __init__(self, key: str='', value: str=''): @property def api_response(self): - return {'error': 'InvalidSettingValue', 'result': {'key': self.key, 'value': self.value}, 'code': 400} + return { + 'error': 'InvalidSettingValue', + 'result': {'key': self.key, 'value': self.value}, + 'code': 400 + } class 
InvalidSettingModification(Exception): """The setting is not allowed to be changed this way @@ -180,12 +201,18 @@ def __init__(self, key: str='', instead: str=''): self.key = key self.instead = instead super().__init__(key) - logging.warning(f'This setting is not allowed to be changed this way: {key}. Instead: {instead}') + logging.warning( + f'This setting is not allowed to be changed this way: {key}.' + + f' Instead: {instead}') return @property def api_response(self): - return {'error': 'InvalidSettingModification', 'result': {'key': self.key, 'instead': self.instead}, 'code': 400} + return { + 'error': 'InvalidSettingModification', + 'result': {'key': self.key, 'instead': self.instead}, + 'code': 400 + } class KeyNotFound(Exception): """A key that is required to be given in the api request was not found @@ -194,7 +221,10 @@ def __init__(self, key: str=''): self.key = key super().__init__(self.key) if key != 'password': - logging.warning(f'This key was not found in the API request, eventhough it\'s required: {key}') + logging.warning( + "This key was not found in the API request," + + f" eventhough it's required: {key}" + ) return @property @@ -209,12 +239,19 @@ def __init__(self, key: str='', value: str=''): self.value = value super().__init__(self.key) if value not in ('undefined', 'null'): - logging.warning(f'This key in the API request has an invalid value: {key} = {value}') + logging.warning( + 'This key in the API request has an invalid value: ' + + f'{key} = {value}' + ) return @property def api_response(self): - return {'error': 'InvalidKeyValue', 'result': {'key': self.key, 'value': self.value}, 'code': 400} + return { + 'error': 'InvalidKeyValue', + 'result': {'key': self.key, 'value': self.value}, + 'code': 400 + } class CredentialNotFound(Exception): """The credential with the given id was not found @@ -235,7 +272,11 @@ def __init__(self, string: str) -> None: @property def api_response(self): - return {'error': 'CredentialSourceNotFound', 'result': 
{'string': self.string}, 'code': 404} + return { + 'error': 'CredentialSourceNotFound', + 'result': {'string': self.string}, + 'code': 404 + } class CredentialAlreadyAdded(Exception): """A credential for the given source is already added @@ -259,10 +300,52 @@ class DownloadLimitReached(Exception): """ def __init__(self, string: str) -> None: self.string = string - logging.warning(f'Credential source {string} has reached it\'s download limit') + logging.warning(f"Credential source {string} has reached it's download limit") return @property def api_response(self): - return {'error': 'DownloadLimitReached', 'result': {'string': self.string}, 'code': 509} - \ No newline at end of file + return { + 'error': 'DownloadLimitReached', + 'result': {'string': self.string}, + 'code': 509 + } + +class TorrentClientNotFound(Exception): + """The torrent client with the given ID was not found + """ + api_response = {'error': 'TorrentClientNotFound', 'result': {}, 'code': 404} + + def __init__(self) -> None: + logging.warning('Torrent client with given id not found') + return + +class TorrentClientDownloading(Exception): + """The torrent client is desired to be deleted + but there is a torrent downloading with it + """ + def __init__(self, torrent_client_id: int): + self.torrent_client_id = torrent_client_id + super().__init__(self.torrent_client_id) + logging.warning( + f'Deleting torrent client failed because there is ' + + f'a torrent downloading with it: {self.torrent_client_id}' + ) + return + + @property + def api_response(self): + return { + 'error': 'TorrentClientDownloading', + 'result': {'torrent_client_id': self.torrent_client_id}, + 'code': 400 + } + +class TorrentClientNotWorking(Exception): + """The torrent client is not working + """ + api_response = {'error': 'TorrentClientNotWorking', 'result': {}, 'code': 400} + + def __init__(self) -> None: + logging.warning('Torrent client is not working') + return diff --git a/backend/db.py b/backend/db.py index 
9506a627..8db80fe9 100644 --- a/backend/db.py +++ b/backend/db.py @@ -16,7 +16,7 @@ from backend.logging import set_log_level __DATABASE_FILEPATH__ = 'db', 'Kapowarr.db' -__DATABASE_VERSION__ = 11 +__DATABASE_VERSION__ = 12 class Singleton(type): _instances = {} @@ -93,7 +93,8 @@ def __repr__(self) -> str: return f'<{self.__class__.__name__}; {current_thread().name}; {id(self)}>' def set_db_location(db_file_location: str) -> None: - """Setup database location. Create folder for database and set location for db.DBConnection + """Setup database location. Create folder for database + and set location for db.DBConnection Args: db_file_location (str): The absolute path to the database file @@ -112,8 +113,12 @@ def get_db(output_type='tuple', temp: bool=False): """Get a database cursor instance or create a new one if needed Args: - output_type ('tuple'|'dict', optional): The type of output the cursor should have. Defaults to 'tuple'. - temp (bool, optional): Decides if a new manually handled cursor is returned instead of the cached one. Defaults to False. + output_type ('tuple'|'dict', optional): The type of output of the cursor. + Defaults to 'tuple'. + + temp (bool, optional): Decides if a new manually handled cursor is returned + instead of the cached one. + Defaults to False. Returns: Cursor: Database cursor instance with desired output type set @@ -135,7 +140,7 @@ def get_db(output_type='tuple', temp: bool=False): return cursor def close_db(e: str=None): - """Close database cursor, commit database and close database (setup after each request) + """Close database cursor, commit database and close database. Args: e (str, optional): Error. Defaults to None. 
@@ -311,7 +316,10 @@ def migrate_db(current_db_version: int) -> None: str(v[0]) for v in cursor.execute("SELECT comicvine_id FROM volumes;") ] - updates = ((r['date_last_updated'], r['comicvine_id']) for r in ComicVine().fetch_volumes(volume_ids)) + updates = ( + (r['date_last_updated'], r['comicvine_id']) + for r in ComicVine().fetch_volumes(volume_ids) + ) cursor.executemany( "UPDATE volumes SET last_cv_update = ? WHERE comicvine_id = ?;", updates @@ -352,7 +360,10 @@ def migrate_db(current_db_version: int) -> None: GROUP BY v.id; """).fetchall() - updates = ((determine_special_version(v[1], v[2], v[3], v[4]) ,v[0]) for v in volumes) + updates = ( + (determine_special_version(v[1], v[2], v[3], v[4]) ,v[0]) + for v in volumes + ) cursor.executemany( "UPDATE volumes SET special_version = ? WHERE id = ?;", @@ -441,7 +452,10 @@ def migrate_db(current_db_version: int) -> None: (volume[0],) ) ] - updates.append((determine_special_version(volume[1], volume[2], issue_titles), volume[0])) + updates.append( + (determine_special_version(volume[1], volume[2], issue_titles), + volume[0]) + ) cursor.executemany( "UPDATE volumes SET special_version = ? 
WHERE id = ?;", @@ -451,6 +465,30 @@ def migrate_db(current_db_version: int) -> None: current_db_version = 11 update_db_version(current_db_version) + if current_db_version == 11: + # V11 -> V12 + cursor.executescript(""" + DROP TABLE download_queue; + + CREATE TABLE download_queue( + id INTEGER PRIMARY KEY, + client_type VARCHAR(255) NOT NULL, + torrent_client_id INTEGER, + + link TEXT NOT NULL, + filename_body TEXT NOT NULL, + source VARCHAR(25) NOT NULL, + + volume_id INTEGER NOT NULL, + issue_id INTEGER, + page_link TEXT, + + FOREIGN KEY (torrent_client_id) REFERENCES torrent_clients(id), + FOREIGN KEY (volume_id) REFERENCES volumes(id), + FOREIGN KEY (issue_id) REFERENCES issues(id) + ); + """) + return def setup_db() -> None: @@ -523,12 +561,29 @@ def setup_db() -> None: issue_id ) ); + CREATE TABLE IF NOT EXISTS torrent_clients( + id INTEGER PRIMARY KEY, + type VARCHAR(255) NOT NULL, + title VARCHAR(255) NOT NULL, + base_url TEXT NOT NULL, + username VARCHAR(255), + password VARCHAR(255), + api_token VARCHAR(255) + ); CREATE TABLE IF NOT EXISTS download_queue( id INTEGER PRIMARY KEY, + client_type VARCHAR(255) NOT NULL, + torrent_client_id INTEGER, + link TEXT NOT NULL, + filename_body TEXT NOT NULL, + source VARCHAR(25) NOT NULL, + volume_id INTEGER NOT NULL, issue_id INTEGER, - + page_link TEXT, + + FOREIGN KEY (torrent_client_id) REFERENCES torrent_clients(id), FOREIGN KEY (volume_id) REFERENCES volumes(id), FOREIGN KEY (issue_id) REFERENCES issues(id) ); @@ -599,7 +654,9 @@ def setup_db() -> None: ).fetchone()[0]) if current_db_version < __DATABASE_VERSION__: - logging.debug(f'Database migration: {current_db_version} -> {__DATABASE_VERSION__}') + logging.debug( + f'Database migration: {current_db_version} -> {__DATABASE_VERSION__}' + ) migrate_db(current_db_version) # Redundant but just to be sure, in case # the version isn't updated in the last migration of the function @@ -648,7 +705,10 @@ def setup_db() -> None: ) # Add service preferences - order = 
[(names[0], place + 1) for place, names in enumerate(supported_source_strings)] + order = [ + (names[0], place + 1) + for place, names in enumerate(supported_source_strings) + ] logging.debug(f'Inserting service preferences: {order}') cursor.executemany( """ diff --git a/backend/download.py b/backend/download.py deleted file mode 100644 index 628561fb..00000000 --- a/backend/download.py +++ /dev/null @@ -1,292 +0,0 @@ -#-*- coding: utf-8 -*- - -"""Handling the download queue and history -""" - -import logging -from os import listdir, makedirs, remove -from os.path import basename, join -from threading import Thread -from typing import List, Union - -from backend.blocklist import add_to_blocklist -from backend.custom_exceptions import DownloadLimitReached, DownloadNotFound -from backend.db import get_db -from backend.download_clients import Download, DownloadStates, MegaDownload -from backend.getcomics import _extract_download_links, credentials -from backend.post_processing import PostProcessing -from backend.settings import Settings, private_settings - - -#===================== -# Download handling -#===================== -class DownloadHandler: - """Handles downloads - """ - queue: List[Download] = [] - downloading_item: Union[Thread, None] = None - - def __init__(self, context) -> None: - """Setup the download handler - - Args: - context (Flask): A flask app instance - """ - self.context = context.app_context - self.load_download_thread = Thread(target=self.__load_downloads, name="Download Importer") - return - - def __run_download(self, download: Download) -> None: - """Start a download. Intended to be run in a thread. - - Args: - download (Download): The download to run. One of the entries in self.queue. 
- """ - logging.info(f'Starting download: {download.id}') - - with self.context(): - try: - download.run() - except DownloadLimitReached: - # Mega download limit reached mid-download - download.state == DownloadStates.CANCELED_STATE - self.queue = [e for e in self.queue if not isinstance(e['instance'], MegaDownload)] - else: - if download.state == DownloadStates.CANCELED_STATE: - PostProcessing(download, self.queue).short() - return - - if download.state == DownloadStates.FAILED_STATE: - PostProcessing(download, self.queue).error() - - if download.state == DownloadStates.DOWNLOADING_STATE: - download.state = DownloadStates.IMPORTING_STATE - PostProcessing(download, self.queue).full() - - self.queue.pop(0) - self.downloading_item = None - - self._process_queue() - return - - def _process_queue(self) -> None: - """Handle the queue. In the case that there is something in the queue and it isn't already downloading, - start the download. This can safely be called multiple times while a download is going or while there is - nothing in the queue. 
- """ - if not self.queue or self.downloading_item: - return - - # First entry in queue is not downloading at this point - self.downloading_item = Thread(target=self.__run_download, args=(self.queue[0],), name="Download Handler") - self.downloading_item.start() - - return - - def __load_downloads(self) -> None: - """Load downloads from the database and add them to the queue for re-downloading - """ - logging.debug('Loading downloads from database') - with self.context(): - cursor = get_db() - cursor2 = get_db('dict', temp=True) - cursor2.execute(""" - SELECT - id, - link, - volume_id, issue_id - FROM download_queue; - """) - for download in cursor2: - logging.debug(f'Download from database: {dict(download)}') - result = self.add(download['link'], download['volume_id'], download['issue_id'], download['id']) - if not result: - # Link is broken, which triggers a write to the database - # To avoid the database being locked for a long time while importing - # we commit in-between. - cursor.connection.commit() - cursor2.connection.close() - return - - def add(self, - link: str, - volume_id: int, issue_id: int=None, - _download_db_id_override: int=None - ) -> List[dict]: - """Add a download to the queue - - Args: - link (str): A getcomics link to download from - volume_id (int): The id of the volume for which the download is intended - issue_id (int, optional): The id of the issue for which the download is intended. Defaults to None. - _download_db_id_override (int, optional): Internal use only. Map download to an already existing entry in the database. Defaults to None. - - Returns: - List[dict]: Queue entries that were added from the link. 
- """ - logging.info( - f'Adding download for volume {volume_id}{f" issue {issue_id}" if issue_id else ""}: {link}' - ) - - # Check if link isn't already in queue - if any(d for d in self.queue if link in (d.page_link, d.download_link)): - logging.info('Download already in queue') - return [] - - is_gc_link = link.startswith(private_settings['getcomics_url']) - - downloads: List[Download] = [] - if is_gc_link: - # Extract download links and convert into Download instances - GC_downloads, limit_reached = _extract_download_links(link, volume_id, issue_id) - if not GC_downloads: - if not limit_reached: - # No links extracted from page so add it to blocklist - add_to_blocklist(link, 3) - logging.warning('Unable to extract download links from source') - if _download_db_id_override: - get_db().execute( - "DELETE FROM download_queue WHERE id = ?", - (_download_db_id_override,) - ) - return [] - downloads = GC_downloads - - result = [] - # Register download in database - db_id = _download_db_id_override or get_db().execute(""" - INSERT INTO download_queue(link, volume_id, issue_id) - VALUES (?,?,?); - """, - (link, volume_id, issue_id) - ).lastrowid - - for download in downloads: - download.id = self.queue[-1].id + 1 if self.queue else 1 - download.db_id = db_id - download.volume_id = volume_id - download.issue_id = issue_id - download.page_link = link if is_gc_link else None - - # Add to queue - result.append(download.todict()) - self.queue.append(download) - - self._process_queue() - return result - - def stop_handle(self) -> None: - """Cancel any running download and stop the handler - """ - logging.debug('Stopping download thread') - if self.downloading_item: - self.queue[0].stop() - self.downloading_item.join() - return - - def get_all(self) -> List[dict]: - """Get all queue entries - - Returns: - List[dict]: All queue entries, formatted using `Download.todict()`. 
- """ - return [e.todict() for e in self.queue] - - def get_one(self, download_id: int) -> dict: - """Get a queue entry based on it's id. - - Args: - download_id (int): The id of the download to fetch - - Raises: - DownloadNotFound: The id doesn't map to any download in the queue - - Returns: - dict: The queue entry, formatted using `Download.todict()`. - """ - for entry in self.queue: - if entry['id'] == download_id: - return entry.todict() - raise DownloadNotFound - - def remove(self, download_id: int) -> None: - """Remove a download entry from the queue - - Args: - download_id (int): The id of the download to remove from the queue - - Raises: - DownloadNotFound: The id doesn't map to any download in the queue - """ - logging.info(f'Removing download with id {download_id}') - - # Delete download from queue - for download in self.queue: - if download.id == download_id: - if download.state == DownloadStates.DOWNLOADING_STATE: - download.stop() - self.downloading_item.join() - self.downloading_item = None - self.queue.remove(download) - PostProcessing(download, self.queue)._remove_from_queue() - break - else: - raise DownloadNotFound - - self._process_queue() - return - - def create_download_folder(self) -> None: - """Create the download folder if it doesn't already. - """ - makedirs(Settings().get_settings()['download_folder'], exist_ok=True) - return - - def empty_download_folder(self) -> None: - """Empty the temporary download folder of files that aren't being downloaded. - Handy in the case that a crash left half-downloaded files behind in the folder. 
- """ - logging.info(f'Emptying the temporary download folder') - folder = Settings().get_settings()['download_folder'] - files_in_queue = [basename(download.file) for download in self.queue] - files_in_folder = listdir(folder) - ghost_files = [join(folder, f) for f in files_in_folder if not f in files_in_queue] - for f in ghost_files: - remove(f) - return - -#===================== -# Download History Managing -#===================== -def get_download_history(offset: int=0) -> List[dict]: - """Get the download history in blocks of 50. - - Args: - offset (int, optional): The offset of the list. The higher the number, the deeper into history you go. Defaults to 0. - - Returns: - List[dict]: The history entries. - """ - result = list(map( - dict, - get_db('dict').execute( - """ - SELECT - original_link, title, downloaded_at - FROM download_history - ORDER BY downloaded_at DESC - LIMIT 50 - OFFSET ?; - """, - (offset * 50,) - ) - )) - return result - -def delete_download_history() -> None: - """Delete complete download history - """ - logging.info('Deleting download history') - get_db().execute("DELETE FROM download_history;") - return diff --git a/backend/download_clients.py b/backend/download_direct_clients.py similarity index 59% rename from backend/download_clients.py rename to backend/download_direct_clients.py index 13816464..49910512 100644 --- a/backend/download_clients.py +++ b/backend/download_direct_clients.py @@ -4,8 +4,7 @@ """ import logging -from abc import ABC, abstractmethod -from os.path import basename, splitext, join +from os.path import basename, join, splitext from re import IGNORECASE, compile from time import perf_counter from urllib.parse import unquote_plus @@ -15,7 +14,8 @@ from backend.credentials import Credentials from backend.custom_exceptions import LinkBroken -from backend.settings import blocklist_reasons, Settings +from backend.download_general import Download, DownloadStates +from backend.settings import Settings, blocklist_reasons 
from .lib.mega import Mega, RequestError, sids @@ -23,49 +23,17 @@ credentials = Credentials(sids) download_chunk_size = 4194304 # 4MB Chunks -class DownloadStates: - QUEUED_STATE = 'queued' - DOWNLOADING_STATE = 'downloading' - IMPORTING_STATE = 'importing' - FAILED_STATE = 'failed' - CANCELED_STATE = 'canceled' - -class Download(ABC): - # This block is assigned after initialisation of the object - id: int - db_id: int - volume_id: int - issue_id: int - page_link: str - - source: str - download_link: str - - file: str - title: str - size: int - - state: str - progress: float - speed: float - - @abstractmethod - def run(self) -> None: - return - - @abstractmethod - def stop(self) -> None: - return - - @abstractmethod - def todict(self) -> dict: - return - class BaseDownload(Download): def __init__(self): + self.id = None self.state: str = DownloadStates.QUEUED_STATE def todict(self) -> dict: + """Represent the download in the form of a dict + + Returns: + dict: The dict with all relevant info about the download + """ return { 'id': self.id, 'volume_id': self.volume_id, @@ -74,6 +42,7 @@ def todict(self) -> dict: 'page_link': self.page_link, 'source': self.source, 'download_link': self.download_link, + 'type': self.type, 'file': self.file, 'title': self.title, @@ -88,16 +57,25 @@ def __repr__(self) -> str: return f'<{self.__class__.__name__}, {self.download_link}, {self.file}>' class DirectDownload(BaseDownload): - """For downloading a file directly from a link - """ - def __init__(self, link: str, filename_body: str, source: str, custom_name: bool=True): + "For downloading a file directly from a link" + + type = 'direct' + + def __init__(self, + link: str, + filename_body: str, + source: str, + custom_name: bool=True + ): """Setup the direct download Args: link (str): The link (that leads to a file) that should be used filename_body (str): The body of the filename to write to source (str): The name of the source of the link - custom_name (bool, optional): If the 
name supplied should be used or the default filename. Defaults to True. + custom_name (bool, optional): If the name supplied should be used + or the default filename. + Defaults to True. Raises: LinkBroken: The link doesn't work @@ -106,24 +84,32 @@ def __init__(self, link: str, filename_body: str, source: str, custom_name: bool super().__init__() self.progress: float = 0.0 self.speed: float = 0.0 + self.size: int = 0 self.download_link = link self.source = source + self._filename_body = filename_body - self.size: int = 0 r = get(self.download_link, stream=True) r.close() if not r.ok: raise LinkBroken(1, blocklist_reasons[1]) + self.size = int(r.headers.get('content-length', -1)) + if custom_name: - self.__filename_body = filename_body.rstrip('.') + self.title = filename_body.rstrip('.') else: - self.__filename_body = splitext(unquote_plus(self.download_link.split('/')[-1]))[0] + self.title = splitext(unquote_plus( + self.download_link.split('/')[-1] + ))[0] self.file = self.__build_filename(r) - self.title = splitext(basename(self.file))[0] - self.size = int(r.headers.get('content-length',-1)) + return - def __extract_extension(self, content_type: str, content_disposition: str, url: str) -> str: + def __extract_extension(self, + content_type: str, + content_disposition: str, + url: str + ) -> str: """Find the extension of the file behind the link Args: @@ -141,10 +127,9 @@ def __extract_extension(self, content_type: str, content_disposition: str, url: url )) ) + extension = '' if match: - extension = '.' + match[0] - else: - extension = '' + extension += '.' 
+ match[0] return extension @@ -152,7 +137,7 @@ def __build_filename(self, r) -> str: """Build the filename from the download folder, filename body and extension Args: - r (_type_): The request to the link + r (Request): The request to the link Returns: str: The filename @@ -163,50 +148,57 @@ def __build_filename(self, r) -> str: r.headers.get('Content-Disposition', ''), r.url ) - return join(folder, self.__filename_body + extension) + return join(folder, self.title + extension) def run(self) -> None: - """Start the download - """ self.state = DownloadStates.DOWNLOADING_STATE size_downloaded = 0 - with get(self.download_link, stream=True) as r: - with open(self.file, 'wb') as f: - start_time = perf_counter() - try: - for chunk in r.iter_content(chunk_size=download_chunk_size): - if self.state == DownloadStates.CANCELED_STATE: - break - - f.write(chunk) - - # Update progress - chunk_size = len(chunk) - size_downloaded += chunk_size - self.speed = round(chunk_size / (perf_counter() - start_time), 2) - if self.size == -1: - # Total size of file is not given so set progress to amount downloaded - self.progress = size_downloaded - else: - # Total size of file is given so calculate progress and speed - self.progress = round(size_downloaded / self.size * 100, 2) - start_time = perf_counter() - - except ChunkedEncodingError: - self.state = DownloadStates.FAILED_STATE + with get(self.download_link, stream=True) as r, \ + open(self.file, 'wb') as f: + + start_time = perf_counter() + try: + for chunk in r.iter_content(chunk_size=download_chunk_size): + if self.state == DownloadStates.CANCELED_STATE: + break + + f.write(chunk) + + # Update progress + chunk_size = len(chunk) + size_downloaded += chunk_size + self.speed = round( + chunk_size / (perf_counter() - start_time), + 2 + ) + if self.size == -1: + # No file size so progress is amount downloaded + self.progress = size_downloaded + else: + self.progress = round( + size_downloaded / self.size * 100, + 2 + ) + start_time = 
perf_counter() + + except ChunkedEncodingError: + self.state = DownloadStates.FAILED_STATE return - def stop(self) -> None: - """Interrupt the download - """ - self.state = DownloadStates.CANCELED_STATE + def stop(self, + state: DownloadStates = DownloadStates.CANCELED_STATE + ) -> None: + self.state = state return class MegaDownload(BaseDownload): """For downloading a file via Mega - """ + """ + + type = 'mega' + @property def progress(self) -> float: return self._mega.progress @@ -219,14 +211,25 @@ def speed(self) -> float: def size(self) -> int: return self._mega.size - def __init__(self, link: str, filename_body: str, source: str='mega', custom_name: bool=True): + def __init__(self, + link: str, + filename_body: str, + source: str='mega', + custom_name: bool=True + ): """Setup the mega download Args: - link (str): The mega link - filename_body (str): The body of the filename to write to - source (str, optional): The name of the source of the link. Defaults to 'mega'. - custom_name (bool, optional): If the name supplied should be used or the default filename. Defaults to True. + link (str): The mega link. + + filename_body (str): The body of the filename to write to. + + source (str, optional): The name of the source of the link. + Defaults to 'mega'. + + custom_name (bool, optional): If the name supplied should be used + or the default filename. + Defaults to True. 
Raises: LinkBroken: The link doesn't work @@ -236,18 +239,19 @@ def __init__(self, link: str, filename_body: str, source: str='mega', custom_nam self.download_link = link self.source = source - self.__filename_body = filename_body.rstrip('.') cred = credentials.get_one_from_source('mega') try: self._mega = Mega(link, cred['email'], cred['password']) except RequestError: raise LinkBroken(1, blocklist_reasons[1]) + self._filename_body = filename_body if not custom_name: - self.__filename_body = splitext(self._mega.mega_filename)[0] + self._filename_body = splitext(self._mega.mega_filename)[0] self.file = self.__build_filename() self.title = splitext(basename(self.file))[0] + return def __extract_extension(self) -> str: """Find the extension of the file behind the link @@ -258,14 +262,15 @@ def __extract_extension(self) -> str: return splitext(self._mega.mega_filename)[1] def __build_filename(self) -> str: - """Build the filename from the download folder, filename body and extension + """Build the filename from the download folder, filename body + and extension Returns: str: The filename """ folder = Settings().get_settings()['download_folder'] extension = self.__extract_extension() - return join(folder, self.__filename_body + extension) + return join(folder, self._filename_body + extension) def run(self) -> None: """ @@ -276,9 +281,11 @@ def run(self) -> None: """ self.state = DownloadStates.DOWNLOADING_STATE self._mega.download_url(self.file) + return - def stop(self) -> None: - """Interrupt the download - """ - self.state = DownloadStates.CANCELED_STATE + def stop(self, + state: DownloadStates = DownloadStates.CANCELED_STATE + ) -> None: + self.state = state self._mega.downloading = False + return diff --git a/backend/download_general.py b/backend/download_general.py new file mode 100644 index 00000000..92985164 --- /dev/null +++ b/backend/download_general.py @@ -0,0 +1,308 @@ +#-*- coding: utf-8 -*- + +"""To avoid import loops, general classes regarding 
downloading are here. +""" + +from abc import ABC, abstractmethod +from typing import Tuple, Union + +from backend.custom_exceptions import (InvalidKeyValue, KeyNotFound, + TorrentClientDownloading, + TorrentClientNotWorking) +from backend.db import get_db + + +class DownloadStates: + QUEUED_STATE = 'queued' + DOWNLOADING_STATE = 'downloading' + SEEDING_STATE = 'seeding' + IMPORTING_STATE = 'importing' + + FAILED_STATE = 'failed' + "Download was unsuccessful" + CANCELED_STATE = 'canceled' + "Download was removed from queue" + SHUTDOWN_STATE = 'shutting down' + "Download was stopped because Kapowarr is shutting down" + +class Download(ABC): + # This block is assigned after initialisation of the object + id: Union[int, None] + volume_id: Union[int, None] + issue_id: Union[int, None] + page_link: Union[str, None] + + _filename_body: str + source: str + download_link: str + type: str + + file: str + title: str + size: int + + state: str + progress: float + speed: float + + @abstractmethod + def __init__( + self, + link: str, + filename_body: str, + source: str, + custom_name: bool=True + ) -> None: + """Create the download instance + + Args: + link (str): The link to the download + (could be direct download link, mega link or magnet link) + + filename_body (str): The body of the file to download to + + source (str): The source of the download + + custom_name (bool, optional): Whether or not to use the filename body + or to use the default name of the download. Defaults to True. + """ + return + + @abstractmethod + def run(self) -> None: + """Start the download + """ + return + + @abstractmethod + def stop(self, state: DownloadStates=DownloadStates.CANCELED_STATE) -> None: + """Interrupt the download + + Args: + state (DownloadStates, optional): The state to set for the download. + Defaults to DownloadStates.CANCELED_STATE. 
+ """ + return + + @abstractmethod + def todict(self) -> dict: + return + +class TorrentClient(ABC): + id: int + type: str + title: str + base_url: str + username: Union[str, None] + password: Union[str, None] + api_token: Union[str, None] + + _tokens: Tuple[str] = ('title', 'base_url') + """The keys the client needs or could need for operation + (mostly whether it's username + password or api_token)""" + + @abstractmethod + def __init__(self, id: int) -> None: + """Create a connection with a torrent client + + Args: + id (int): The id of the torrent client + """ + return + + @abstractmethod + def todict(self) -> dict: + """Get info about torrent client in a dict + + Returns: + dict: The info about the torrent client + """ + return + + @abstractmethod + def edit(self, edits: dict) -> dict: + """Edit the torrent client + + Args: + edits (dict): The keys and their new values for + the torrent client settings + + Raises: + TorrentClientDownloading: The is a download in the queue + using the client + + Returns: + dict: The new info of the torrent client + """ + return + + @abstractmethod + def delete(self) -> None: + """Delete the torrent client + + Raises: + TorrentClientDownloading: There is a download in the queue + using the client + """ + return + + @abstractmethod + def add_torrent(self, + magnet_link: str, + target_folder: str, + torrent_name: Union[str, None] + ) -> int: + """Add a torrent to the client for downloading + + Args: + magnet_link (str): The magnet link of the torrent to download + target_folder (str): The folder to download in + torrent_name (Union[str, None]): The name of the torrent in the client + Set to `None` to keep original name. 
+ + Returns: + int: The id of the entry in the download client + """ + return + + @abstractmethod + def get_torrent_status(self, torrent_id: int) -> dict: + """Get the status of the torrent in a dict + + Args: + torrent_id (int): The id of the torrent to get status of + + Returns: + dict: The status of the torrent + """ + return + + @abstractmethod + def delete_torrent(self, torrent_id: int, delete_files: bool) -> None: + """Remove the torrent from the client + + Args: + torrent_id (int): The id of the torrent to delete + delete_files (bool): Delete the downloaded files + """ + return + + @staticmethod + @abstractmethod + def test( + base_url: str, + username: Union[str, None], + password: Union[str, None], + api_token: Union[str, None] + ) -> bool: + """Check if a torrent client is working + + Args: + base_url (str): The base url on which the client is running. + username (Union[str, None]): The username to access the client, if set. + password (Union[str, None]): The password to access the client, if set. + api_token (Union[str, None]): The api token to access the client, if set. + + Returns: + bool: Whether or not the test succeeded + """ + return + +class BaseTorrentClient(TorrentClient): + def __init__(self, id: int) -> None: + self.id = id + data = get_db('dict').execute(""" + SELECT + type, title, + base_url, + username, password, + api_token + FROM torrent_clients + WHERE id = ? 
+ LIMIT 1; + """, + (id,) + ).fetchone() + self.type = data['type'] + self.title = data['title'] + self.base_url = data['base_url'] + self.username = data['username'] + self.password = data['password'] + self.api_token = data['api_token'] + return + + def todict(self) -> dict: + return { + 'id': self.id, + 'type': self.type, + 'title': self.title, + 'base_url': self.base_url, + 'username': self.username, + 'password': self.password, + 'api_token': self.api_token + } + + def edit(self, edits: dict) -> dict: + cursor = get_db() + if cursor.execute( + "SELECT 1 FROM download_queue WHERE torrent_client_id = ? LIMIT 1;", + (self.id,) + ).fetchone() is not None: + raise TorrentClientDownloading + + from backend.download_torrent_clients import client_types + data = {} + for key in ('title', 'base_url', 'username', 'password', 'api_token'): + if key in self._tokens and not key in edits: + raise KeyNotFound(key) + if key in ('title', 'base_url') and edits[key] is None: + raise InvalidKeyValue(key, None) + data[key] = edits.get(key) if key in self._tokens else None + + if data['username'] is not None and data['password'] is None: + raise InvalidKeyValue('password', data['password']) + + data['base_url'] = data['base_url'].rstrip('/') + + ClientClass = client_types[self.type] + test_result = ClientClass.test( + data['base_url'], + data['username'], + data['password'], + data['api_token'] + ) + if not test_result: + raise TorrentClientNotWorking + + cursor.execute(""" + UPDATE torrent_clients SET + title = ?, + base_url = ?, + username = ?, + password = ?, + api_token = ? + WHERE id = ?; + """, + (data['title'], data['base_url'], + data['username'], data['password'], data['api_token'], + self.id) + ) + return ClientClass(self.id).todict() + + def delete(self) -> None: + cursor = get_db() + if cursor.execute( + "SELECT 1 FROM download_queue WHERE torrent_client_id = ? 
LIMIT 1;", + (self.id,) + ).fetchone() is not None: + raise TorrentClientDownloading + + cursor.execute( + "DELETE FROM torrent_clients WHERE id = ?;", + (self.id,) + ) + + return None + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}; ID {self.id}; {id(self)}>' diff --git a/backend/download_queue.py b/backend/download_queue.py new file mode 100644 index 00000000..954e7978 --- /dev/null +++ b/backend/download_queue.py @@ -0,0 +1,470 @@ +#-*- coding: utf-8 -*- + +"""Handling the download queue and history +""" + +import logging +from os import listdir, makedirs, remove +from os.path import basename, join +from threading import Thread +from time import sleep +from typing import Dict, List, Union + +from backend.blocklist import add_to_blocklist +from backend.custom_exceptions import (DownloadLimitReached, DownloadNotFound, + LinkBroken) +from backend.db import get_db +from backend.download_direct_clients import (DirectDownload, Download, + MegaDownload) +from backend.download_general import DownloadStates +from backend.download_torrent_clients import TorrentClients, TorrentDownload +from backend.getcomics import _extract_download_links +from backend.post_processing import PostProcesser, PostProcesserTorrents +from backend.settings import Settings, private_settings + +#===================== +# Download handling +#===================== +download_type_to_class: Dict[str, Download] = { + c.type: c + for c in Download.__subclasses__()[0].__subclasses__() +} + +class DownloadHandler: + queue: List[Download] = [] + downloading_item: Union[Thread, None] = None + + def __init__(self, context) -> None: + """Setup the download handler + + Args: + context (Flask): A flask app instance + """ + self.context = context.app_context + self.load_download_thread = Thread( + target=self.__load_downloads, + name="Download Importer" + ) + return + + def __choose_torrent_client(self) -> int: + """Get the ID of the torrent client with the least downloads + + Returns: + 
int: The ID of the client + """ + torrent_clients = [ + tc[0] + for tc in get_db().execute( + "SELECT id FROM torrent_clients;" + ) + ] + queue_ids = [ + d.client.id + for d in self.queue + if isinstance(d, TorrentDownload) + ] + sorted_list = sorted(torrent_clients, key=lambda c: queue_ids.count(c)) + return sorted_list[0] + + def __run_download(self, download: Download) -> None: + """Start a download. Intended to be run in a thread. + + Args: + download (Download): The download to run. + One of the entries in self.queue. + """ + logging.info(f'Starting download: {download.id}') + + with self.context(): + try: + download.run() + + except DownloadLimitReached: + # Mega download limit reached mid-download + download.state = DownloadStates.FAILED_STATE + self.queue = [ + e + for e in self.queue + if ( + not isinstance(e['instance'], MegaDownload) + or e == download + ) + ] + + if download.state == DownloadStates.CANCELED_STATE: + PostProcesser.canceled(download) + + elif download.state == DownloadStates.FAILED_STATE: + PostProcesser.failed(download) + + elif download.state == DownloadStates.SHUTDOWN_STATE: + PostProcesser.shutdown(download) + return + + elif download.state == DownloadStates.DOWNLOADING_STATE: + download.state = DownloadStates.IMPORTING_STATE + PostProcesser.success(download) + + self.queue.remove(download) + self.downloading_item = None + + self._process_queue() + return + + def __run_torrent_download(self, download: TorrentDownload) -> None: + """Start a torrent download. Intended to be run in a thread. + + Args: + download (TorrentDownload): The torrent download to run. + One of the entries in self.queue. 
+ """ + download.run() + + with self.context(): + while True: + download.update_status() + + if download.state in ( + DownloadStates.QUEUED_STATE, + DownloadStates.DOWNLOADING_STATE, + DownloadStates.SEEDING_STATE + ): + sleep(private_settings['torrent_update_interval']) + continue + + if download.state == DownloadStates.CANCELED_STATE: + download.remove_from_client(delete_files=True) + PostProcesserTorrents.canceled(download) + + elif download.state == DownloadStates.FAILED_STATE: + download.remove_from_client(delete_files=True) + PostProcesserTorrents.failed(download) + + elif download.state == DownloadStates.SHUTDOWN_STATE: + download.remove_from_client(delete_files=True) + PostProcesserTorrents.shutdown(download) + return + + elif download.state == DownloadStates.IMPORTING_STATE: + download.remove_from_client(delete_files=False) + PostProcesserTorrents.success(download) + + self.queue.remove(download) + return + + def _process_queue(self) -> None: + """Handle the queue. In the case that there is something in the queue + and it isn't already downloading, start the download. This can safely be + called multiple times while a download is going or while there is nothing + in the queue. 
+ """ + if not self.queue or self.downloading_item: + return + + first_direct_download = next( + ( + e + for e in self.queue + if isinstance(e, (DirectDownload, MegaDownload)) + ), + None + ) + if not first_direct_download: + return + + # First entry in queue is not downloading at this point + self.downloading_item = Thread( + target=self.__run_download, + args=(first_direct_download,), + name="Download Handler" + ) + self.downloading_item.start() + + return + + def __prepare_downloads_for_queue( + self, + downloads: List[Download], + volume_id: int, + issue_id: int, + page_link: Union[str, None] + ) -> List[Download]: + + cursor = get_db() + for download in downloads: + download.volume_id = volume_id + download.issue_id = issue_id + download.page_link = page_link + + if download.id is None: + download.id = cursor.execute(""" + INSERT INTO download_queue( + client_type, torrent_client_id, + link, filename_body, source, + volume_id, issue_id, page_link + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?); + """, + ( + download.type, + None, + download.download_link, + download._filename_body, + download.source, + download.volume_id, + download.issue_id, + download.page_link + ) + ).lastrowid + + if isinstance(download, TorrentDownload): + if download.client is None: + download.client = TorrentClients.get_client( + self.__choose_torrent_client() + ) + cursor.execute(""" + UPDATE download_queue + SET torrent_client_id = ? 
+ WHERE id = ?; + """, + (download.client.id, download.id) + ) + + download._download_thread = Thread( + target=self.__run_torrent_download, + args=(download,), + name='Torrent Download Handler' + ) + download._download_thread.start() + return downloads + + def __load_downloads(self) -> None: + """Load downloads from the database and add them to the queue + for re-downloading + """ + with self.context(): + cursor = get_db('dict') + downloads = cursor.execute(""" + SELECT + id, client_type, torrent_client_id, + link, filename_body, source, + volume_id, issue_id, page_link + FROM download_queue; + """).fetchall() + + if downloads: + logging.info('Loading downloads') + + for download in downloads: + logging.debug(f'Download from database: {dict(download)}') + try: + dl_instance = download_type_to_class[download['client_type']]( + link=download['link'], + filename_body=download['filename_body'], + source=download['source'], + custom_name=True + ) + dl_instance.id = download['id'] + dl_instance.client = TorrentClients.get_client( + download['torrent_client_id'] + ) + + except LinkBroken as lb: + # Link is broken + add_to_blocklist(download['link'], lb.reason_id) + # Link is broken, which triggers a write to the database + # To avoid the database being locked for a long time while + # importing, we commit in-between. + cursor.connection.commit() + + except DownloadLimitReached: + continue + + self.queue += self.__prepare_downloads_for_queue( + [dl_instance], + download['volume_id'], + download['issue_id'], + download['page_link'] + ) + + self._process_queue() + return + + def add(self, + link: str, + volume_id: int, + issue_id: int=None + ) -> List[dict]: + """Add a download to the queue + + Args: + link (str): A getcomics link to download from + volume_id (int): The id of the volume for which the download is intended + issue_id (int, optional): The id of the issue for which the download + is intended. + Defaults to None. 
+ + Returns: + List[dict]: Queue entries that were added from the link. + """ + logging.info( + 'Adding download for ' + + f'volume {volume_id}{f" issue {issue_id}" if issue_id else ""}: {link}' + ) + + # Check if link isn't already in queue + if any(d for d in self.queue if link in (d.page_link, d.download_link)): + logging.info('Download already in queue') + return [] + + is_gc_link = link.startswith(private_settings['getcomics_url']) + + downloads: List[Download] = [] + if is_gc_link: + # Extract download links and convert into Download instances + GC_downloads, limit_reached = _extract_download_links( + link, + volume_id, + issue_id + ) + + if not GC_downloads: + if not limit_reached: + # No links extracted from page so add it to blocklist + add_to_blocklist(link, 3) + logging.warning( + f'Unable to extract download links from source; {limit_reached=}' + ) + return [] + + downloads = GC_downloads + + result = self.__prepare_downloads_for_queue( + downloads, + volume_id, + issue_id, + link if is_gc_link else None + ) + self.queue += result + + self._process_queue() + return [r.todict() for r in result] + + def stop_handle(self) -> None: + """Cancel any running download and stop the handler + """ + logging.debug('Stopping download thread') + + for e in self.queue: + e.stop(DownloadStates.SHUTDOWN_STATE) + + if self.downloading_item: + self.downloading_item.join() + + return + + def get_all(self) -> List[dict]: + """Get all queue entries + + Returns: + List[dict]: All queue entries, formatted using `Download.todict()`. + """ + return [e.todict() for e in self.queue] + + def get_one(self, download_id: int) -> dict: + """Get a queue entry based on its id. + + Args: + download_id (int): The id of the download to fetch + + Raises: + DownloadNotFound: The id doesn't map to any download in the queue + + Returns: + dict: The queue entry, formatted using `Download.todict()`.
+ """ + for entry in self.queue: + if entry['id'] == download_id: + return entry.todict() + raise DownloadNotFound + + def remove(self, download_id: int) -> None: + """Remove a download entry from the queue + + Args: + download_id (int): The id of the download to remove from the queue + + Raises: + DownloadNotFound: The id doesn't map to any download in the queue + """ + logging.info(f'Removing download with id {download_id}') + + for download in self.queue: + if download.id == download_id: + download.stop() + break + else: + raise DownloadNotFound + + return + + def create_download_folder(self) -> None: + """Create the download folder if it doesn't already. + """ + makedirs(Settings().get_settings()['download_folder'], exist_ok=True) + return + + def empty_download_folder(self) -> None: + """Empty the temporary download folder of files that aren't being downloaded. + Handy in the case that a crash left half-downloaded files behind in the folder. + """ + logging.info(f'Emptying the temporary download folder') + folder = Settings().get_settings()['download_folder'] + files_in_queue = [basename(download.file) for download in self.queue] + files_in_folder = listdir(folder) + ghost_files = [ + join(folder, f) + for f in files_in_folder + if not f in files_in_queue + ] + for f in ghost_files: + remove(f) + return + +#===================== +# Download History Managing +#===================== +def get_download_history(offset: int=0) -> List[dict]: + """Get the download history in blocks of 50. + + Args: + offset (int, optional): The offset of the list. + The higher the number, the deeper into history you go. + Defaults to 0. + + Returns: + List[dict]: The history entries. 
+ """ + result = list(map( + dict, + get_db('dict').execute( + """ + SELECT + original_link, title, downloaded_at + FROM download_history + ORDER BY downloaded_at DESC + LIMIT 50 + OFFSET ?; + """, + (offset * 50,) + ) + )) + return result + +def delete_download_history() -> None: + """Delete complete download history + """ + logging.info('Deleting download history') + get_db().execute("DELETE FROM download_history;") + return diff --git a/backend/download_torrent_clients.py b/backend/download_torrent_clients.py new file mode 100644 index 00000000..48f068b1 --- /dev/null +++ b/backend/download_torrent_clients.py @@ -0,0 +1,294 @@ +#-*- coding: utf-8 -*- + +"""Downloading using torrents +""" + +import logging +from os.path import join +from typing import Dict, List, Union + +from bencoding import bdecode +from requests import post + +from backend.custom_exceptions import (InvalidKeyValue, TorrentClientNotFound, + TorrentClientNotWorking) +from backend.db import get_db +from backend.download_direct_clients import BaseDownload +from backend.download_general import DownloadStates, TorrentClient +from backend.settings import Settings +from backend.torrent_clients import qBittorrent + +#===================== +# Managing clients +#===================== + +client_types: Dict[str, TorrentClient] = { + 'qBittorrent': qBittorrent.qBittorrent +} + +class TorrentClients: + @staticmethod + def test( + type: str, + base_url: str, + username: Union[str, None], + password: Union[str, None], + api_token: Union[str, None] + ) -> bool: + """Test if a client is supported, working and available + + Args: + type (str): The client identifier. + A key from `download_torrent_clients.client_types`. + + base_url (str): The base url that Kapowarr needs to connect to the client + + username (Union[str, None]): The username to use when authenticating to the client. + Allowed to be `None` if not applicable. + + password (Union[str, None]): The password to use when authenticating to the client. 
+ Allowed to be `None` if not applicable. + + api_token (Union[str, None]): The api token to use when authenticating to the client. + Allowed to be `None` if not applicable. + + Raises: + InvalidKeyValue: One of the parameters has an invalid argument + + Returns: + bool: Whether or not the test was successful + """ + if not type in client_types: + raise InvalidKeyValue('type', type) + + base_url = base_url.rstrip('/') + + result = client_types[type].test( + base_url, + username, + password, + api_token + ) + return result + + @staticmethod + def add( + type: str, + title: str, + base_url: str, + username: Union[str, None], + password: Union[str, None], + api_token: Union[str, None] + ) -> TorrentClient: + """Add a torrent client + + Args: + type (str): The client identifier. + A key from `download_torrent_clients.client_types`. + + title (str): The title to give the client + + base_url (str): The base url to use when connecting to the client + + username (Union[str, None]): The username to use when authenticating to the client. + Allowed to be `None` if not applicable. + + password (Union[str, None]): The password to use when authenticating to the client. + Allowed to be `None` if not applicable. + + api_token (Union[str, None]): The api token to use when authenticating to the client. + Allowed to be `None` if not applicable. + + Raises: + InvalidKeyValue: One of the parameters has an invalid argument + TorrentClientNotWorking: Testing the client failed. + The function `download_torrent_clients.TorrentClients.test` returned `False`.
+ + Returns: + TorrentClient: An instance of `download_general.TorrentClient` + representing the newly added client + """ + if not type in client_types: + raise InvalidKeyValue('type', type) + + if title is None: + raise InvalidKeyValue('title', title) + + if base_url is None: + raise InvalidKeyValue('base_url', base_url) + + if username is not None and password is None: + raise InvalidKeyValue('password', password) + + base_url = base_url.rstrip('/') + + ClientClass = client_types[type] + test_result = ClientClass.test( + base_url, + username, + password, + api_token + ) + if not test_result: + raise TorrentClientNotWorking + + data = { + 'type': type, + 'title': title, + 'base_url': base_url, + 'username': username, + 'password': password, + 'api_token': api_token + } + data = { + k: (v if k in (*ClientClass._tokens, 'type') else None) + for k, v in data.items() + } + + client_id = get_db().execute(""" + INSERT INTO torrent_clients( + type, title, + base_url, + username, password, api_token + ) VALUES (?, ?, ?, ?, ?, ?); + """, + (data['type'], data['title'], + data['base_url'], + data['username'], data['password'], data['api_token']) + ).lastrowid + return ClientClass(client_id) + + @staticmethod + def get_clients() -> List[dict]: + """Get a list of all torrent clients + + Returns: + List[dict]: The list with all torrent clients + """ + cursor = get_db('dict') + cursor.execute(""" + SELECT + id, type, + title, base_url, + username, password, + api_token + FROM torrent_clients + ORDER BY title, id; + """ + ) + result = [dict(r) for r in cursor] + return result + + @staticmethod + def get_client(id: int) -> TorrentClient: + """Get a torrent client based on its ID. + + Args: + id (int): The ID of the torrent client + + Raises: + TorrentClientNotFound: The ID does not link to any client + + Returns: + TorrentClient: An instance of `download_general.TorrentClient` + representing the client with the given ID.
+ """ + client_type = get_db().execute( + "SELECT type FROM torrent_clients WHERE id = ? LIMIT 1;", + (id,) + ).fetchone() + + if not client_type: + raise TorrentClientNotFound + + return client_types[client_type[0]](id) + +#===================== +# Downloading torrents +#===================== + +class TorrentDownload(BaseDownload): + "For downloading a torrent using a torrent client" + + type = 'torrent' + + def __init__( + self, + link: str, + filename_body: str, + source: str, + custom_name: bool=True + ) -> None: + logging.debug(f'Creating torrent download: {link}, {filename_body}') + super().__init__() + self.client: Union[TorrentClient, None] = None + self.source = source + self.download_link = link + self._filename_body = filename_body + self.file = None + self.size: int = 0 + self.progress: float = 0.0 + self.speed: float = 0.0 + self._torrent_id = None + self._download_thread = None + self._download_folder = Settings().get_settings()['download_folder'] + + if custom_name: + self.title = filename_body.rstrip('.') + + # Find name of torrent as it is folder that it's downloaded in + r = post( + 'https://magnet2torrent.com/upload/', + data={'magnet': link}, + headers={'User-Agent': 'Kapowarr'} + ) + if r.headers.get('content-type') != 'application/x-bittorrent': + raise NotImplementedError + + name = bdecode(r.content)[b'info'][b'name'].decode() + self.title = self.title or name + self.file = join(self._download_folder, name) + + return + + def run(self) -> None: + self._torrent_id = self.client.add_torrent( + self.download_link, + self._download_folder, + self.title + ) + return + + def update_status(self) -> None: + """ + Update the various variables about the state/progress + of the torrent download + """ + torrent_status = self.client.get_torrent_status(self._torrent_id) + self.progress = torrent_status['progress'] + self.speed = torrent_status['speed'] + self.size = torrent_status['size'] + if not self.state == DownloadStates.CANCELED_STATE: + self.state 
= torrent_status['state'] + return + + def stop(self, + state: DownloadStates = DownloadStates.CANCELED_STATE + ) -> None: + self.state = state + return + + def remove_from_client(self, delete_files: bool) -> None: + """Remove the download from the torrent client + + Args: + delete_files (bool): Delete downloaded files + """ + self.client.delete_torrent(self._torrent_id, delete_files) + return + + def todict(self) -> dict: + return { + **super().todict(), + 'client': self.client.id + } diff --git a/backend/getcomics.py b/backend/getcomics.py index 09144efc..519d9ed3 100644 --- a/backend/getcomics.py +++ b/backend/getcomics.py @@ -13,28 +13,35 @@ from backend.blocklist import add_to_blocklist, blocklist_contains from backend.custom_exceptions import DownloadLimitReached, LinkBroken from backend.db import get_db -from backend.download_clients import (DirectDownload, Download, MegaDownload, - credentials) +from backend.download_direct_clients import (DirectDownload, Download, + MegaDownload) +from backend.download_torrent_clients import TorrentDownload from backend.files import extract_filename_data from backend.naming import (generate_empty_name, generate_issue_name, generate_issue_range_name, generate_tpb_name) from backend.search import _check_matching_titles -from backend.settings import (Settings, blocklist_reasons, private_settings, +from backend.settings import (Settings, blocklist_reasons, supported_source_strings) mega_regex = compile(r'https?://mega\.(nz|io)/(#(F\!|\!)|folder/|file/)', IGNORECASE) mediafire_regex = compile(r'https?://www\.mediafire\.com/', IGNORECASE) extract_mediafire_regex = compile(r'window.location.href\s?=\s?\'https://download\d+\.mediafire.com/.*?(?=\')', IGNORECASE) -def _check_download_link(link_text: str, link: str) -> Union[str, None]: +def _check_download_link( + link_text: str, + link: str, + torrent_client_available: bool +) -> Union[str, None]: """Check if download link is supported and allowed Args: link_text (str): The title 
of the link link (str): The link itself + torrent_client_available (bool): Whether or not a torrent client is available Returns: - Union[str, None]: Either the name of the service (e.g. `mega`) or `None` if it's not allowed + Union[str, None]: Either the name of the service (e.g. `mega`) + or `None` if it's not allowed. """ logging.debug(f'Checking download link: {link}, {link_text}') if not link: @@ -50,14 +57,19 @@ def _check_download_link(link_text: str, link: str) -> Union[str, None]: # Check if link is from supported source for source in supported_source_strings: - if link_text in source: + if any(s in link_text for s in source): logging.debug(f'Checking download link: {link_text} maps to {source[0]}') + + if 'torrent' in source[0] and not torrent_client_available: + return + return source[0] return def _purify_link(link: str) -> dict: - """Extract the link that directly leads to the download from the link on the getcomics page + """Extract the link that directly leads to the download from the link + on the getcomics page Args: link (str): The link on the getcomics page @@ -66,13 +78,19 @@ def _purify_link(link: str) -> dict: LinkBroken: Link is invalid, not supported or broken Returns: - dict: The pure link, a download instance for the correct service (e.g. DirectDownload or MegaDownload) and the source title + dict: The pure link, + a download instance for the correct service (child of `download_general.Download`) + and the source title. """ logging.debug(f'Purifying link: {link}') # Go through every link and get it all down to direct download or magnet links if link.startswith('magnet:?'): # Link is already magnet link - raise LinkBroken(2, blocklist_reasons[2]) + return { + 'link': link, + 'target': TorrentDownload, + 'source': 'getcomics (torrent)' + } elif link.startswith('http'): r = get(link, headers={'User-Agent': 'Kapowarr'}, stream=True) @@ -80,11 +98,15 @@ def _purify_link(link: str) -> dict: if mega_regex.search(url): # Link is mega - if not '#F!' 
in url and not '/folder/' in url: - return {'link': url, 'target': MegaDownload, 'source': 'mega'} - # else - # Link is not supported (folder most likely) - raise LinkBroken(2, blocklist_reasons[2]) + if '#F!' in url: + # Link is not supported (folder) + raise LinkBroken(2, blocklist_reasons[2]) + + if '/folder/' in url: + # Link is not supported (folder) + raise LinkBroken(2, blocklist_reasons[2]) + + return {'link': url, 'target': MegaDownload, 'source': 'mega'} elif mediafire_regex.search(url): # Link is mediafire @@ -93,41 +115,56 @@ def _purify_link(link: str) -> dict: raise LinkBroken(1, blocklist_reasons[1]) elif '/folder/' in url: - # Link is not supported (folder most likely) + # Link is not supported raise LinkBroken(2, blocklist_reasons[2]) result = extract_mediafire_regex.search(r.text) if result: - return {'link': result.group(0).split("'")[-1], 'target': DirectDownload, 'source': 'mediafire'} + return { + 'link': result.group(0).split("'")[-1], + 'target': DirectDownload, + 'source': 'mediafire' + } soup = BeautifulSoup(r.text, 'html.parser') button = soup.find('a', {'id': 'downloadButton'}) if button: - return {'link': button['href'], 'target': DirectDownload, 'source': 'mediafire'} - - # Link is not broken and not a folder but we still can't find the download button... + return { + 'link': button['href'], + 'target': DirectDownload, + 'source': 'mediafire' + } + + # Link is not broken and not a folder + # but we still can't find the download button... 
raise LinkBroken(1, blocklist_reasons[1]) - elif url.startswith('magnet:?'): - # Link is magnet link - raise LinkBroken(2, blocklist_reasons[2]) - return {'link': url, 'target': None, 'source': 'torrent'} - elif r.headers.get('Content-Type','') == 'application/x-bittorrent': # Link is torrent file - raise LinkBroken(2, blocklist_reasons[2]) hash = sha1(bencode(bdecode(r.content)[b"info"])).hexdigest() - return {'link': "magnet:?xt=urn:btih:" + hash + "&tr=udp://tracker.cyberia.is:6969/announce&tr=udp://tracker.port443.xyz:6969/announce&tr=http://tracker3.itzmx.com:6961/announce&tr=udp://tracker.moeking.me:6969/announce&tr=http://vps02.net.orel.ru:80/announce&tr=http://tracker.openzim.org:80/announce&tr=udp://tracker.skynetcloud.tk:6969/announce&tr=https://1.tracker.eu.org:443/announce&tr=https://3.tracker.eu.org:443/announce&tr=http://re-tracker.uz:80/announce&tr=https://tracker.parrotsec.org:443/announce&tr=udp://explodie.org:6969/announce&tr=udp://tracker.filemail.com:6969/announce&tr=udp://tracker.nyaa.uk:6969/announce&tr=udp://retracker.netbynet.ru:2710/announce&tr=http://tracker.gbitt.info:80/announce&tr=http://tracker2.dler.org:80/announce", - 'target': None} + return { + 'link': "magnet:?xt=urn:btih:" + hash + "&tr=udp://tracker.cyberia.is:6969/announce&tr=udp://tracker.port443.xyz:6969/announce&tr=http://tracker3.itzmx.com:6961/announce&tr=udp://tracker.moeking.me:6969/announce&tr=http://vps02.net.orel.ru:80/announce&tr=http://tracker.openzim.org:80/announce&tr=udp://tracker.skynetcloud.tk:6969/announce&tr=https://1.tracker.eu.org:443/announce&tr=https://3.tracker.eu.org:443/announce&tr=http://re-tracker.uz:80/announce&tr=https://tracker.parrotsec.org:443/announce&tr=udp://explodie.org:6969/announce&tr=udp://tracker.filemail.com:6969/announce&tr=udp://tracker.nyaa.uk:6969/announce&tr=udp://retracker.netbynet.ru:2710/announce&tr=http://tracker.gbitt.info:80/announce&tr=http://tracker2.dler.org:80/announce", + 'target': TorrentDownload, + 'source': 'getcomics 
(torrent)' + } - # Link is direct download from getcomics ('Main Server', 'Mirror Server', 'Link 1', 'Link 2', etc.) + # Link is direct download from getcomics + # ('Main Server', 'Mirror Server', 'Link 1', 'Link 2', etc.) return {'link': url, 'target': DirectDownload, 'source': 'getcomics'} else: raise LinkBroken(2, blocklist_reasons[2]) -link_filter_1 = lambda e: e.name == 'p' and 'Language' in e.text and e.find('p') is None -link_filter_2 = lambda e: e.name == 'li' and e.parent.name == 'ul' and ((0 < e.text.count('|') == len(e.find_all('a')) - 1) or (e.find('a') and _check_download_link(e.find('a').text.strip().lower(), e.find('a').attrs.get('href')))) +link_filter_1 = lambda e: ( + e.name == 'p' + and 'Language' in e.text + and e.find('p') is None +) +link_filter_2 = lambda e: ( + e.name == 'li' + and e.parent.name == 'ul' + and e.find('a') +) check_year = compile(r'\b\d{4}\b') def _extract_get_comics_links( soup: BeautifulSoup @@ -141,7 +178,8 @@ def _extract_get_comics_links( Returns: Dict[str, Dict[str, List[str]]]: The outer dict maps the group name to the group. - The group is a dict that maps each service in the group to a list of links for that service. + The group is a dict that maps each service in the group to a list of links + for that service. 
Example: { 'Amazing Spider-Man V1 Issue 1-10': { @@ -152,6 +190,11 @@ def _extract_get_comics_links( } """ logging.debug('Extracting download groups') + + torrent_client_available = get_db().execute( + "SELECT 1 FROM torrent_clients" + ).fetchone() is not None + download_groups = {} body = soup.find('section', {'class': 'post-contents'}) for result in body.find_all(link_filter_1): @@ -170,7 +213,7 @@ def _extract_get_comics_links( elif e.name == 'div' and 'aio-button-center' in (e.attrs.get('class', [])): group_link = e.find('a') link_title = group_link.text.strip().lower() - match = _check_download_link(link_title, group_link['href']) + match = _check_download_link(link_title, group_link['href'], torrent_client_available) if match: group_links.setdefault(match, []).append(group_link['href']) if group_links: @@ -183,7 +226,7 @@ def _extract_get_comics_links( group_links = {} for group_link in result.find_all('a'): link_title = group_link.text.strip().lower() - match = _check_download_link(link_title, group_link['href']) + match = _check_download_link(link_title, group_link['href'], torrent_client_available) if match: group_links.setdefault(match, []).append(group_link['href']) if group_links: @@ -192,18 +235,26 @@ def _extract_get_comics_links( logging.debug(f'Download groups: {download_groups}') return download_groups -def _sort_link_paths(p: List[dict]) -> int: - """Sort the link paths. TPB's are sorted highest, then from most downloads to least. +def _sort_link_paths(p: List[dict]) -> Tuple[float, int]: + """Sort the link paths. TPB's are sorted highest, then from largest range to least. 
Args: p (List[dict]): A link path Returns: - int: The rating (lower is better) + Tuple[float, int]: The rating (lower is better) """ if p[0]['info']['special_version']: - return 0 - return 1 / len(p) + return (0.0, 0) + + issues_covered = 0 + for entry in p: + if isinstance(entry['info']['issue_number'], float): + issues_covered += 1 + elif isinstance(entry['info']['issue_number'], tuple): + issues_covered += (entry['info']['issue_number'][1] - entry['info']['issue_number'][0]) + + return (1 / issues_covered, len(p)) def _create_link_paths( download_groups: Dict[str, Dict[str, List[str]]], @@ -410,9 +461,7 @@ def _test_paths( for links in download['links'].values(): for link in links: try: - # Maybe make purify link async so that all links can be purified 'at the same time'? - # https://www.youtube.com/watch?v=nFn4_nA_yk8&t=1053s - # https://stackoverflow.com/questions/53336675/get-aiohttp-results-as-string + ## Maybe make purify link async so that all links can be purified 'at the same time'? 
pure_link = _purify_link(link) dl_instance = pure_link['target']( link=pure_link['link'], @@ -440,10 +489,11 @@ def _test_paths( if download['info']['special_version']: # Download is essential for group and it doesn't work so try next path break - else: - continue else: - break + if downloads: + break + else: + continue downloads = [] logging.debug(f'Chosen links: {downloads}') @@ -473,24 +523,22 @@ def _extract_download_links(link: str, volume_id: int, issue_id: int=None) -> Tu r = get(link, headers={'user-agent': 'Kapowarr'}, stream=True) if not r.ok: raise requests_ConnectionError + except requests_ConnectionError: # Link broken add_to_blocklist(link, 1) - if link.startswith(private_settings['getcomics_url']): - # Link is to a getcomics page - soup = BeautifulSoup(r.text, 'html.parser') - - # Extract the download groups and filter invalid links - download_groups = _extract_get_comics_links(soup) + # Link is to a getcomics page + soup = BeautifulSoup(r.text, 'html.parser') - # Filter incorrect download groups and combine them (or not) to create download paths - link_paths = _create_link_paths( - download_groups, - volume_id - ) + # Extract the download groups and filter invalid links + download_groups = _extract_get_comics_links(soup) - # Decide which path to take by testing the links - return _test_paths(link_paths, volume_id) + # Filter incorrect download groups and combine them (or not) to create download paths + link_paths = _create_link_paths( + download_groups, + volume_id + ) - return [], False + # Decide which path to take by testing the links + return _test_paths(link_paths, volume_id) diff --git a/backend/post_processing.py b/backend/post_processing.py index 30b5b614..7570fca4 100644 --- a/backend/post_processing.py +++ b/backend/post_processing.py @@ -4,162 +4,295 @@ """ import logging -from abc import ABC, abstractmethod from os import remove -from os.path import basename, isfile, join +from os.path import basename, exists, join from shutil import 
move, rmtree from time import time from typing import List, Tuple from zipfile import ZipFile from backend.db import get_db -from backend.files import extract_filename_data, image_extensions, rename_file +from backend.files import (_list_files, extract_filename_data, + image_extensions, rename_file, supported_extensions) from backend.naming import mass_rename from backend.search import _check_matching_titles from backend.volumes import Volume, scan_files zip_extract_folder = '.zip_extract' -class PostProcessor(ABC): - @abstractmethod - def __init__(self, download): - return - - def short(self) -> None: - return - - def full(self) -> None: - return - - def error(self) -> None: - return - -class PostProcessing(PostProcessor): - """For processing a file after downloading it - """ - def __init__(self, download, queue: list) -> None: - """Setup a post processor for the download - - Args: - download (Download): The download queue entry for which to setup the processor. - Value should be from download.DownloadHandler.queue - queue (List[dict]): The download queue. 
Value should be download.DownloadHandler.queue - """ - self.actions_short = [ - self._delete_file - ] - - self.actions_full = [ - self._remove_from_queue, - self._add_to_history, - self._move_file, - self._add_file_to_database, - self._unzip_file - ] - - self.actions_error = [ - self._remove_from_queue, - self._add_to_history, - self._delete_file - ] - - self.download = download - self.queue = queue - return - - def _remove_from_queue(self) -> None: - """Delete the download from the queue in the database - """ - for entry in self.queue: - if entry.db_id == self.download.db_id and entry.id != self.download.id: - break - else: - get_db().execute( - "DELETE FROM download_queue WHERE id = ?", - (self.download.db_id,) - ) +class PostProcessingActions: + @staticmethod + def remove_from_queue(download) -> None: + "Delete the download from the queue in the database" + get_db().execute( + "DELETE FROM download_queue WHERE id = ?", + (download.id,) + ) return - def _add_to_history(self) -> None: - """Add the download to history in the database - """ + @staticmethod + def add_to_history(download) -> None: + "Add the download to history in the database" get_db().execute( """ INSERT INTO download_history(original_link, title, downloaded_at) VALUES (?,?,?); """, - (self.download.page_link, self.download.title, round(time())) + (download.page_link, download.title, round(time())) ) return - - def _move_file(self) -> None: - """Move file from download folder to final destination - """ - logging.debug(f'Moving download to final destination: {self.download}') - if isfile(self.download.file): + + @staticmethod + def move_file(download) -> None: + "Move file from download folder to final destination" + if exists(download.file): folder = get_db().execute( "SELECT folder FROM volumes WHERE id = ? 
LIMIT 1", - (self.download.volume_id,) + (download.volume_id,) ).fetchone()[0] - file_dest = join(folder, basename(self.download.file)) - if isfile(file_dest): - logging.warning(f'The file {file_dest} already exists; replacing with downloaded file') + file_dest = join(folder, basename(download.file)) + logging.debug( + f'Moving download to final destination: {download}, Dest: {file_dest}' + ) + + if exists(file_dest): + logging.warning( + f'The file {file_dest} already exists; replacing with downloaded file' + ) remove(file_dest) - move(self.download.file, file_dest) - self.download.file = file_dest + + move(download.file, file_dest) + download.file = file_dest return - - def _unzip_file(self) -> None: - if self.download.file.lower().endswith('.zip'): - unzip = get_db().execute("SELECT value FROM config WHERE key = 'unzip';").fetchone()[0] + + @staticmethod + def unzip_file(download) -> None: + "Unzip the file" + if download.file.lower().endswith('.zip'): + unzip = get_db().execute( + "SELECT value FROM config WHERE key = 'unzip';" + ).fetchone()[0] if unzip: - unzip_volume(self.download.volume_id, self.download.file) + unzip_volume(download.volume_id, download.file) return - def _delete_file(self) -> None: - """Delete file from download folder - """ - if isfile(self.download.file): - remove(self.download.file) + @staticmethod + def delete_file(download) -> None: + "Delete file from download folder" + if exists(download.file): + remove(download.file) return - - def _add_file_to_database(self) -> None: - """Register file in database and match to a volume/issue - """ - scan_files(Volume(self.download.volume_id).get_info()) + + @staticmethod + def add_file_to_database(download) -> None: + "Register file in database and match to a volume/issue" + scan_files(Volume(download.volume_id).get_info()) return - def __run_actions(self, actions: list) -> None: - """Run all actions in the list supplied + @staticmethod + def move_file_torrent(download) -> None: + """Move file 
downloaded using torrent from download folder to + final destination""" + PPA.move_file(download) + + cursor = get_db('dict') + + files = _extract_files_from_folder( + download.file, + download.volume_id + ) + + scan_files(Volume(download.volume_id).get_info()) + + rename_files = cursor.execute(""" + SELECT value + FROM config + WHERE key = 'rename_downloaded_files' + LIMIT 1; + """).fetchone()[0] + + if rename_files and files: + mass_rename(download.volume_id, filepath_filter=files) + + return + +PPA = PostProcessingActions +"""Rename of PostProcessingActions to make local code less cluttered. +Advised to use the name `PostProcessingActions` outside of this file.""" + +class PostProcesser: + actions_success = [ + PPA.remove_from_queue, + PPA.add_to_history, + PPA.move_file, + PPA.add_file_to_database, + PPA.unzip_file + ] + + actions_canceled = [ + PPA.delete_file, + PPA.remove_from_queue + ] - Args: - actions (list): A list of actions that should be run on the file - """ + actions_shutdown = [ + PPA.delete_file + ] + + actions_failed = [ + PPA.remove_from_queue, + PPA.add_to_history, + PPA.delete_file + ] + + @staticmethod + def __run_actions(actions: list, download) -> None: for action in actions: - action() + action(download) return - def short(self) -> None: - """Process the file with the 'short'-program. Intended for when the application is shutting down. - """ - logging.info(f'Post-download short processing: {self.download.id}') - self.__run_actions(self.actions_short) - return - - def full(self) -> None: - """Process the file with the 'full'-program. Intended for standard handling of the file. - """ - logging.info(f'Post-download processing: {self.download.id}') - self.__run_actions(self.actions_full) + @classmethod + def success(cls, download) -> None: + logging.info(f'Postprocessing of successful download: {download.id}') + cls.__run_actions(cls.actions_success, download) return - - def error(self) -> None: - """Process the file with the 'error'-program. 
Intended for when the download had an error. - """ - logging.info(f'Post-download error processing: {self.download.id}') - self.__run_actions(self.actions_error) + + @classmethod + def canceled(cls, download) -> None: + logging.info(f'Postprocessing of canceled download: {download.id}') + cls.__run_actions(cls.actions_canceled, download) + return + + @classmethod + def shutdown(cls, download) -> None: + logging.info(f'Postprocessing of shut down download: {download.id}') + cls.__run_actions(cls.actions_shutdown, download) + return + + @classmethod + def failed(cls, download) -> None: + logging.info(f'Postprocessing of failed download: {download.id}') + cls.__run_actions(cls.actions_failed, download) return +class PostProcesserTorrents(PostProcesser): + actions_success = [ + PPA.remove_from_queue, + PPA.add_to_history, + PPA.move_file_torrent, + PPA.unzip_file + ] + +def __get_volume_data(volume_id: int) -> dict: + """Get info about the volume based on ID + + Args: + volume_id (int): The ID of the volume. + + Returns: + dict: The data + """ + volume_data = dict(get_db('dict').execute(""" + SELECT + v.id, + v.title, year, + volume_number, + folder, + special_version, + MAX(i.date) AS last_issue_date + FROM volumes v + INNER JOIN issues i + ON v.id = i.volume_id + WHERE v.id = ? 
+ LIMIT 1; + """, + (volume_id,) + ).fetchone()) + + volume_data['annual'] = 'annual' in volume_data['title'].lower() + if volume_data['last_issue_date']: + volume_data['end_year'] = int(volume_data['last_issue_date'].split('-')[0]) + else: + volume_data['end_year'] = volume_data['year'] + + return volume_data + +def _extract_files_from_folder( + source_folder: str, + volume_id: int +) -> List[str]: + volume_data = __get_volume_data(volume_id) + folder_contents = _list_files(source_folder, supported_extensions) + + cursor = get_db() + + # Filter non-relevant files + rel_files: List[Tuple[str, dict]] = [] + rel_files_append = rel_files.append + for c in folder_contents: + if 'variant cover' in c.lower(): + continue + + result = extract_filename_data(c, False) + if (_check_matching_titles(result['series'], volume_data['title']) + and ( + # Year has to match + (result['year'] is not None + and volume_data['year'] - 1 <= result['year'] <= volume_data['end_year'] + 1) + # Or volume number + or (result['volume_number'] is not None + and (( + isinstance(result['volume_number'], int) + and result['volume_number'] == volume_data['volume_number'] + ) + or ( + volume_data['special_version'] == 'volume-as-issue' + and cursor.execute( + "SELECT 1 FROM issues WHERE volume_id = ? AND calculated_issue_number = ? 
LIMIT 1;", + ( + volume_data['id'], + result['volume_number'] + if isinstance(result['volume_number'], int) else + result['volume_number'][0] + ) + ).fetchone() + ) + )) + # Or neither should be found (we play it safe so we keep those) + or (result['year'] is None and result['volume_number'] is None) + ) + and result['annual'] == volume_data['annual']): + rel_files_append((c, result)) + logging.debug(f'Relevant files: {rel_files}') + + # Delete non-relevant files + for c in folder_contents: + if not any(r for r in rel_files if r[0] == c): + remove(c) + + # Move remaining files to main folder and delete source folder + result = [] + result_append = result.append + for c, c_info in rel_files: + if c.endswith(image_extensions): + intermediate_folder = (f'{volume_data["title"]} ({volume_data["year"]})' + + f'Volume {c_info["volume_number"] if isinstance(c_info["volume_number"], int) else "-".join(map(str, c_info))}') + + if volume_data['special_version'] and volume_data['special_version'] != 'volume-as-issue': + intermediate_folder += f' {volume_data["special_version"]}' + elif not (volume_data["special_version"] == 'volume-as-issue'): + intermediate_folder += f' {c_info["issue_number"]}' + + dest = join(volume_data["folder"], intermediate_folder, basename(c)) + + else: + dest = join(volume_data["folder"], basename(c)) + + rename_file(c, dest) + result_append(dest) + + rmtree(source_folder, ignore_errors=True) + return result + def unzip_volume(volume_id: int, file: str=None) -> None: """Get the zip files of a volume and unzip them. This process unzips the file, deletes the original zip file, @@ -170,9 +303,11 @@ def unzip_volume(volume_id: int, file: str=None) -> None: file (str, optional): Instead of unzipping all zip files for the volume, only unzip the given file. Defaults to None. 
""" - cursor = get_db() + cursor = get_db('dict') if file: - logging.info(f'Unzipping the following file for volume {volume_id}: {file}') + logging.info( + f'Unzipping the following file for volume {volume_id}: {file}' + ) files = [file] else: logging.info(f'Unzipping for volume {volume_id}') @@ -191,107 +326,32 @@ def unzip_volume(volume_id: int, file: str=None) -> None: if not files: return - - volume_data = cursor.execute(""" - SELECT - v.title, year, - volume_number, - folder, - special_version, - MAX(i.date) AS last_issue_date - FROM volumes v - INNER JOIN issues i - ON v.id = i.volume_id - WHERE v.id = ? - LIMIT 1; - """, + + volume_folder = cursor.execute( + "SELECT folder FROM volumes WHERE id = ? LIMIT 1;", (volume_id,) - ).fetchone() - annual = 'annual' in volume_data[0].lower() - end_year = int(volume_data[5].split('-')[0]) if volume_data[5] else volume_data[1] + ).fetchone()['folder'] # All zip files gathered, now handle them one by one - resulting_files = [] - resulting_files_append = resulting_files.append - zip_folder = join(volume_data[3], zip_extract_folder) + resulting_files: List[str] = [] + zip_folder = join(volume_folder, zip_extract_folder) for f in files: logging.debug(f'Unzipping {f}') # 1. Unzip with ZipFile(f, 'r') as zip: - contents = [join(zip_folder, c) for c in zip.namelist() if not c.endswith('/')] - logging.debug(f'Zip contents: {contents}') zip.extractall(zip_folder) # 2. Delete original file remove(f) - # 3. 
Filter non-relevant files - rel_files = [] - rel_files_append = rel_files.append - for c in contents: - if 'variant cover' in c.lower(): - continue - - result = extract_filename_data(c, False) - if (_check_matching_titles(result['series'], volume_data[0]) - and ( - # Year has to match - (result['year'] is not None - and volume_data[1] - 1 <= result['year'] <= end_year + 1) - # Or volume number - or (result['volume_number'] is not None - and (( - isinstance(result['volume_number'], int) - and result['volume_number'] == volume_data[2] - ) - or ( - volume_data[4] == 'volume-as-issue' - and cursor.execute( - "SELECT 1 FROM issues WHERE volume_id = ? AND calculated_issue_number = ? LIMIT 1;", - ( - volume_id, - result['volume_number'] - if isinstance(result['volume_number'], int) else - result['volume_number'][0] - ) - ).fetchone() - ) - )) - # Or neither should be found (we play it safe so we keep those) - or (result['year'] is None and result['volume_number'] is None) - ) - and result['annual'] == annual): - rel_files_append((c, result)) - logging.debug(f'Zip relevant files: {rel_files}') - - # 4. Delete non-relevant files - for c in contents: - if not any(r for r in rel_files if r[0] == c): - remove(c) - - # 5. 
Move remaining files to main folder and delete zip folder - rel_files: List[Tuple[str, dict]] - for c, c_info in rel_files: - if c.endswith(image_extensions): - intermediate_folder = (f'{volume_data[0]} ({volume_data[1]})' - + f'Volume {c_info["volume_number"] if isinstance(c_info["volume_number"], int) else "-".join(map(str, c_info))}') - - if volume_data[4] and volume_data[4] != 'volume-as-issue': - intermediate_folder += f' {volume_data[4]}' - elif not (volume_data[4] == 'volume-as-issue'): - intermediate_folder += f' {c_info["issue_number"]}' - - dest = join(volume_data[3], intermediate_folder, basename(c)) - - else: - dest = join(volume_data[3], basename(c)) - - rename_file(c, dest) - resulting_files_append(dest) - rmtree(zip_folder, ignore_errors=True) - - # 6. Rename remaining files + # 3. Filter files and pull matching ones out of folder into volume folder + resulting_files += _extract_files_from_folder( + zip_folder, + volume_id + ) + + # 4. Rename remaining files scan_files(Volume(volume_id).get_info()) if resulting_files: mass_rename(volume_id, filepath_filter=resulting_files) diff --git a/backend/settings.py b/backend/settings.py index 25cdd16e..bdeaf679 100644 --- a/backend/settings.py +++ b/backend/settings.py @@ -45,7 +45,9 @@ 'getcomics_url': 'https://getcomics.org', 'hosting_threads': 10, 'version': 'v1.0.0-beta-3', - 'python_version': ".".join(str(i) for i in list(version_info)) + 'python_version': ".".join(str(i) for i in list(version_info)), + 'torrent_update_interval': 5, # Seconds + 'torrent_tag': 'kapowarr' } about_data = { @@ -72,9 +74,10 @@ credential_sources = ('mega',) -supported_source_strings = (('mega', 'mega link'), - ('mediafire', 'mediafire link'), - ('getcomics', 'download now','main server','mirror download','link 1','link 2')) +supported_source_strings = (('mega',), + ('mediafire',), + ('getcomics', 'download now', 'main server', 'mirror download', 'link 1', 'link 2'), + ('getcomics (torrent)', 'torrent')) def 
update_manifest(url_base: str) -> None: with open(folder_path('frontend', 'static', 'json', 'manifest.json'), 'r+') as f: @@ -94,7 +97,9 @@ def get_settings(self, use_cache: bool=True) -> dict: """Get all settings and their values Args: - use_cache (bool, optional): Wether or not to use the cache instead of going to the database. Defaults to True. + use_cache (bool, optional): Wether or not to use the cache instead of + going to the database. + Defaults to True. Returns: dict: All settings and their values @@ -123,7 +128,8 @@ def set_settings(self, settings: dict) -> dict: InvalidSettingKey: The key isn't recognised Returns: - dict: The settings and their new values. Same format as settings.Settings.get_settings() + dict: The settings and their new values. + Same format as `settings.Settings.get_settings()`. """ from backend.naming import check_format @@ -151,7 +157,8 @@ def set_settings(self, settings: dict) -> dict: raise InvalidSettingValue(key, value) value = int(value) - elif key in ('volume_folder_naming','file_naming','file_naming_tpb','file_naming_empty'): + elif key in ('volume_folder_naming','file_naming', + 'file_naming_tpb','file_naming_empty'): check_format(value, key) elif key == 'log_level' and not value in log_levels: @@ -203,7 +210,8 @@ def reset_setting(self, key: str) -> dict: InvalidSettingKey: The key isn't recognised Returns: - dict: The settings and their new values. Same format as settings.Settings.get_settings() + dict: The settings and their new values. + Same format as `settings.Settings.get_settings()`. """ logging.debug(f'Setting reset: {key}') if not key in default_settings: @@ -220,7 +228,8 @@ def generate_api_key(self) -> dict: """Generate a new api key Returns: - dict: The settings and their new value. Same format as settings.Settings.get_settings() + dict: The settings and their new value. + Same format as `settings.Settings.get_settings()`. 
""" logging.debug('Generating new api key') api_key = urandom(16).hex() @@ -250,7 +259,7 @@ def set_service_preference(self, order: List[str]) -> None: """Update the service preference Args: - order (List[str]): A list with the services, in order of preference + order (List[str]): A list with the services, in order of preference. """ logging.info(f'Updating service preference: {order}') cursor = get_db() diff --git a/backend/tasks.py b/backend/tasks.py index afbdb50e..678007be 100644 --- a/backend/tasks.py +++ b/backend/tasks.py @@ -12,7 +12,7 @@ from backend.custom_exceptions import (InvalidComicVineApiKey, TaskNotDeletable, TaskNotFound) from backend.db import get_db -from backend.download import DownloadHandler +from backend.download_queue import DownloadHandler from backend.post_processing import unzip_volume from backend.search import auto_search from backend.volumes import refresh_and_scan @@ -106,7 +106,10 @@ def run(self) -> List[tuple]: # Get search results and download them results = auto_search(self.volume_id, self.issue_id) if results: - return [(result['link'], self.volume_id, self.issue_id) for result in results] + return [ + (result['link'], self.volume_id, self.issue_id) + for result in results + ] return [] #===================== @@ -273,7 +276,7 @@ def __init__(self, context, download_handler: DownloadHandler) -> None: Args: context (Flask): A Flask app instance - download_handler (DownloadHandler): An instance of the `download.DownloadHandler` class + download_handler (DownloadHandler): An instance of `download.DownloadHandler` to which any download instructions are sent """ self.context = context.app_context @@ -317,9 +320,9 @@ def __run_task(self, task: Task) -> None: return def _process_queue(self) -> None: - """Handle the queue. In the case that there is something in the queue and it isn't already running, - start the task. This can safely be called multiple times while a task is going or while there is - nothing in the queue. 
+ """Handle the queue. In the case that there is something in the queue and + it isn't already running, start the task. This can safely be called + multiple times while a task is going or while there is nothing in the queue. """ if not self.queue: return @@ -345,7 +348,11 @@ def add(self, task: Task) -> int: 'task': task, 'id': id, 'status': 'queued', - 'thread': Thread(target=self.__run_task, args=(task,), name="Task Handler") + 'thread': Thread( + target=self.__run_task, + args=(task,), + name="Task Handler" + ) } self.queue.append(task_data) logging.info(f'Added task: {task.display_title} ({id})') @@ -428,7 +435,8 @@ def get_all(self) -> List[dict]: """Get all tasks in the queue Returns: - List[dict]: A list with all tasks in the queue (formatted using `self.__format_entry()`) + List[dict]: A list with all tasks in the queue. + Formatted using `self.__format_entry()`. """ return [self.__format_entry(t) for t in self.queue] @@ -442,7 +450,8 @@ def get_one(self, task_id: int) -> dict: TaskNotFound: The id doesn't match with any task in the queue Returns: - dict: The info of the task in the queue (formatted using `self.__format_entry()`) + dict: The info of the task in the queue. + Formatted using `self.__format_entry()`. """ for entry in self.queue: if entry['id'] == task_id: @@ -477,7 +486,10 @@ def get_task_history(offset: int=0) -> List[dict]: """Get the task history in blocks of 50. Args: - offset (int, optional): The offset of the list. The higher the number, the deeper into history you go. Defaults to 0. + offset (int, optional): The offset of the list. + The higher the number, the deeper into history you go. + + Defaults to 0. Returns: List[dict]: The history entries. 
diff --git a/backend/torrent_clients/qBittorrent.py b/backend/torrent_clients/qBittorrent.py new file mode 100644 index 00000000..a0adb119 --- /dev/null +++ b/backend/torrent_clients/qBittorrent.py @@ -0,0 +1,124 @@ +#-*- coding: utf-8 -*- + +from re import IGNORECASE, compile +from typing import Union + +from requests import Session, get +from requests.exceptions import RequestException + +from backend.download_general import BaseTorrentClient, DownloadStates +from backend.settings import private_settings + +filename_magnet_link = compile(r'(?<=&dn=).*?(?=&)', IGNORECASE) +hash_magnet_link = compile(r'(?<=urn:btih:)\w+?(?=&)', IGNORECASE) + +class qBittorrent(BaseTorrentClient): + _tokens = ('title', 'base_url', 'username', 'password') + + def __init__(self, id: int) -> None: + super().__init__(id) + + self.ssn = Session() + + if self.username and self.password: + params = { + 'username': self.username, + 'password': self.password + } + else: + params = {} + + self.ssn.get( + f'{self.base_url}/api/v2/auth/login', + params=params + ) + + return + + def add_torrent(self, + magnet_link: str, + target_folder: str, + torrent_name: Union[str, None] + ) -> int: + if torrent_name is not None: + magnet_link = filename_magnet_link.sub(torrent_name, magnet_link) + + params = { + 'urls': magnet_link, + 'savepath': target_folder, + 'category': private_settings['torrent_tag'] + } + + self.ssn.get( + f'{self.base_url}/api/v2/torrents/add', + params=params + ) + + return hash_magnet_link.search(magnet_link).group(0) + + def get_torrent_status(self, torrent_id: int) -> dict: + result = self.ssn.get( + f'{self.base_url}/api/v2/torrents/properties', + params={'hash': torrent_id} + ).json() + + if result['pieces_have'] <= 0: + state = DownloadStates.QUEUED_STATE + + elif result['completion_date'] == -1: + state = DownloadStates.DOWNLOADING_STATE + + elif result['eta'] != 8640000: + state = DownloadStates.SEEDING_STATE + + else: + state = DownloadStates.IMPORTING_STATE + + return { + 
'size': result['total_size'], + 'progress': round( + (result['total_downloaded'] - result['total_wasted']) + / + result['total_size'] * 100, + + 2 + ), + 'speed': result['dl_speed'], + 'state': state + } + + def delete_torrent(self, torrent_id: int, delete_files: bool) -> None: + self.ssn.get( + f'{self.base_url}/api/v2/torrents/delete', + params={ + 'hashes': torrent_id, + 'deleteFiles': delete_files + } + ) + return + + @staticmethod + def test( + base_url: str, + username: Union[str, None] = None, + password: Union[str, None] = None, + api_token: Union[str, None] = None + ) -> bool: + try: + if username and password: + params = { + 'username': username, + 'password': password + } + else: + params = {} + + cookie = get( + f'{base_url}/api/v2/auth/login', + params=params + ).headers.get('set-cookie') + + return cookie is not None + + except RequestException: + return False diff --git a/frontend/api.py b/frontend/api.py index 54179e3b..447a630d 100644 --- a/frontend/api.py +++ b/frontend/api.py @@ -19,11 +19,16 @@ InvalidSettingValue, IssueNotFound, KeyNotFound, RootFolderInUse, RootFolderNotFound, TaskNotDeletable, - TaskNotFound, VolumeAlreadyAdded, - VolumeDownloadedFor, VolumeNotFound) + TaskNotFound, TorrentClientDownloading, + TorrentClientNotFound, + TorrentClientNotWorking, + VolumeAlreadyAdded, VolumeDownloadedFor, + VolumeNotFound) from backend.db import close_db -from backend.download import (DownloadHandler, credentials, - delete_download_history, get_download_history) +from backend.download_direct_clients import credentials +from backend.download_queue import (DownloadHandler, delete_download_history, + get_download_history) +from backend.download_torrent_clients import TorrentClients, client_types from backend.library_import import import_library, propose_library_import from backend.naming import (generate_volume_folder_name, mass_rename, preview_mass_rename) @@ -54,19 +59,24 @@ def error_handler(method): def wrapper(*args, **kwargs): try: return 
method(*args, **kwargs) + except (BlocklistEntryNotFound, - CredentialAlreadyAdded, - CredentialInvalid, CredentialNotFound, - CredentialSourceNotFound, - CVRateLimitReached, DownloadNotFound, - FolderNotFound, InvalidComicVineApiKey, - InvalidKeyValue, InvalidSettingKey, - InvalidSettingModification, - InvalidSettingValue, IssueNotFound, - KeyNotFound, RootFolderInUse, - RootFolderNotFound, TaskNotDeletable, - TaskNotFound, VolumeAlreadyAdded, - VolumeDownloadedFor, VolumeNotFound) as e: + CredentialAlreadyAdded, + CredentialInvalid, CredentialNotFound, + CredentialSourceNotFound, + CVRateLimitReached, DownloadNotFound, + FolderNotFound, InvalidComicVineApiKey, + InvalidKeyValue, InvalidSettingKey, + InvalidSettingModification, + InvalidSettingValue, IssueNotFound, + KeyNotFound, RootFolderInUse, + RootFolderNotFound, TaskNotDeletable, + TaskNotFound, TorrentClientDownloading, + TorrentClientNotFound, + TorrentClientNotWorking, + VolumeAlreadyAdded, VolumeDownloadedFor, + VolumeNotFound + ) as e: return return_api(**e.api_response) wrapper.__name__ = method.__name__ @@ -135,7 +145,11 @@ def extract_key(request, key: str, check_existence: bool=True) -> Any: value = False else: raise InvalidKeyValue(key, value) - + + elif key == 'type': + if not value in client_types: + raise InvalidKeyValue(key, value) + else: # Default value if key == 'sort': @@ -656,3 +670,75 @@ def api_credential(id: int): elif request.method == 'DELETE': credentials.delete(id) return return_api({}) + +#===================== +# Torrent Clients +#===================== +@api.route('/torrentclients', methods=['GET', 'POST']) +@error_handler +@auth +def api_torrent_clients(): + if request.method == 'GET': + result = TorrentClients.get_clients() + return return_api(result) + + elif request.method == 'POST': + data: dict = request.get_json() + data = { + k: data.get(k) + for k in ('type', + 'title', 'base_url', + 'username', 'password', + 'api_token' + ) + } + result = 
TorrentClients.add(**data).todict() + return return_api(result, code=201) + +@api.route('/torrentclients/options', methods=['GET']) +@error_handler +@auth +def api_torrent_clients_keys(): + result = {k: v._tokens for k, v in client_types.items()} + return return_api(result) + +@api.route('/torrentclients/test', methods=['POST']) +@error_handler +@auth +def api_torrent_clients_test(): + data: dict = request.get_json() + data = { + k: data.get(k) + for k in ('type', 'base_url', + 'username', 'password', + 'api_token' + ) + } + result = TorrentClients.test(**data) + return return_api({'result': result}) + +@api.route('/torrentclients/', methods=['GET', 'PUT', 'DELETE']) +@error_handler +@auth +def api_torrent_client(id: int): + client = TorrentClients.get_client(id) + + if request.method == 'GET': + result = client.todict() + return return_api(result) + + elif request.method == 'PUT': + data: dict = request.get_json() + data = { + k: data.get(k) + for k in ('title', 'base_url', + 'username', 'password', + 'api_token' + ) + } + result = client.edit(data) + return return_api(result) + + elif request.method == 'DELETE': + client.delete() + return return_api({}) diff --git a/frontend/static/css/queue.css b/frontend/static/css/queue.css index 74f3ca8e..722158cb 100644 --- a/frontend/static/css/queue.css +++ b/frontend/static/css/queue.css @@ -20,7 +20,7 @@ th, td { } .status-column { - width: 7.5rem; + width: clamp(7.5rem, 11vw, 10rem); } .number-column { diff --git a/frontend/static/css/settings.css b/frontend/static/css/settings.css index e435d601..f21bb4bb 100644 --- a/frontend/static/css/settings.css +++ b/frontend/static/css/settings.css @@ -11,14 +11,14 @@ /* */ /* Sections titles */ /* */ -h2 { +main h2 { font-size: 1.5rem; border-bottom: 1px solid var(--border-color); margin-bottom: 1rem; font-weight: 500; } -h2:not(:first-of-type) { +main h2:not(:first-of-type) { margin-top: 1rem; } @@ -215,6 +215,149 @@ h2:not(:first-of-type) { height: 1.2rem; } +/* */ +/* 
Download Client List */ +/* */ +.client-list { + display: flex; + gap: 1rem; + flex-wrap: wrap; +} + +.client-list > button { + position: relative; + width: 15rem; + height: 6rem; + + border-radius: 6px; + padding-inline: 1rem; + background-color: var(--library-entry-color); + color: var(--library-entry-font-color); + + overflow-x: hidden; + font-size: 1.2rem; + + box-shadow: 0 0 9px 0px #00000075; +} + +.client-list > button::after { + content: ''; + position: absolute; + inset: 0 0 0 auto; + + box-shadow: 0px 0px 20px 36px var(--library-entry-color); +} + +.add-client-button { + display: flex; + justify-content: center; + align-items: center; +} + +.add-client-button > img { + width: 2rem; + transform: rotate(45deg); +} + +.window > section > div:nth-child(3) > button { + background-color: var(--error-color); +} + +.window > section > div:nth-child(3) > button[type="submit"] { + background-color: var(--success-color); +} + +.window > section > div:nth-child(2) p.error { + text-align: center; +} + +.window > section > div:nth-child(2) form { + width: 60%; +} + +#test-torrent-edit, +#test-torrent-add { + position: relative; + width: 5rem; + display: flex; + overflow-x: hidden; + background-color: var(--tool-bar-color); + transition: background-color 300ms linear; +} + +#test-torrent-edit > div, +#test-torrent-add > div { + position: absolute; + width: 3rem; + transition: left 100ms linear; +} + +#test-torrent-edit > div:first-child, +#test-torrent-add > div:first-child { + left: -100%; +} + +#test-torrent-edit > div:nth-child(2), +#test-torrent-add > div:nth-child(2) { + left: 20%; +} + +#test-torrent-edit > div:last-child, +#test-torrent-add > div:last-child { + left: 100%; +} + +#test-torrent-edit.show-success, +#test-torrent-add.show-success { + background-color: var(--success-color); +} +#test-torrent-edit.show-success > div:nth-child(2), +#test-torrent-add.show-success > div:nth-child(2) { + left: -100%; +} +#test-torrent-edit.show-success > div:last-child, 
+#test-torrent-add.show-success > div:last-child { + left: 20%; +} + +#test-torrent-edit.show-fail, +#test-torrent-add.show-fail { + background-color: var(--error-color); +} +#test-torrent-edit.show-fail > div:nth-child(2), +#test-torrent-add.show-fail > div:nth-child(2) { + left: 100%; +} +#test-torrent-edit.show-fail > div:first-child, +#test-torrent-add.show-fail > div:first-child { + left: 20%; +} + +#choose-torrent-window > div:nth-child(2) { + justify-content: flex-start; +} + +#choose-torrent-list { + display: flex; + justify-content: center; + gap: 1rem; + flex-wrap: wrap; +} + +#choose-torrent-list > button { + width: 10rem; + height: 4rem; + + border-radius: 4px; + padding: 1rem 2rem; + background-color: var(--library-entry-color); + color: var(--library-entry-font-color); + + font-size: 1rem; + + box-shadow: 0 0 9px 0px #00000075; +} + @media (max-width: 1000px) { table.fold tbody { display: flex; diff --git a/frontend/static/css/window.css b/frontend/static/css/window.css index 5875dc7d..c2ca519a 100644 --- a/frontend/static/css/window.css +++ b/frontend/static/css/window.css @@ -100,7 +100,7 @@ } .window > section > div:nth-child(2) form table tr > td > select, -.window > section > div:nth-child(2) form table tr > td > input[type="text"] { +.window > section > div:nth-child(2) form table tr > td > input { width: 90%; padding: .5rem; border: 2px solid var(--border-color); @@ -122,6 +122,7 @@ } .window > section > div:nth-child(3) > button { + height: 2rem; padding: .5rem 1rem; border-radius: 2px; color: var(--light-color); diff --git a/frontend/static/js/settings_download_clients.js b/frontend/static/js/settings_download_clients.js new file mode 100644 index 00000000..1fae9075 --- /dev/null +++ b/frontend/static/js/settings_download_clients.js @@ -0,0 +1,317 @@ +function createUsernameInput(id) { + const username_row = document.createElement('tr'); + const username_header = document.createElement('th'); + const username_label = 
document.createElement('label'); + username_label.innerText = 'Username'; + username_label.setAttribute('for', id); + username_header.appendChild(username_label); + username_row.appendChild(username_header) + const username_container = document.createElement('td'); + const username_input = document.createElement('input'); + username_input.type = 'text' + username_input.id = id; + username_container.appendChild(username_input); + username_row.appendChild(username_container); + return username_row; +}; + +function createPasswordInput(id) { + const password_row = document.createElement('tr'); + const password_header = document.createElement('th'); + const password_label = document.createElement('label'); + password_label.innerText = 'Password'; + password_label.setAttribute('for', id); + password_header.appendChild(password_label); + password_row.appendChild(password_header) + const password_container = document.createElement('td'); + const password_input = document.createElement('input'); + password_input.type = 'password' + password_input.id = id; + password_container.appendChild(password_input); + password_row.appendChild(password_container); + return password_row; +}; + +function createApiTokenInput(id) { + const token_row = document.createElement('tr'); + const token_header = document.createElement('th'); + const token_label = document.createElement('label'); + token_label.innerText = 'API Token'; + token_label.setAttribute('for', id); + token_header.appendChild(token_label); + token_row.appendChild(token_header) + const token_container = document.createElement('td'); + const token_input = document.createElement('input'); + token_input.type = 'text' + token_input.id = id; + token_container.appendChild(token_input); + token_row.appendChild(token_container); + return token_row; +}; + +function loadEditTorrent(api_key, id) { + const form = document.querySelector('#edit-torrent-form tbody'); + form.dataset.id = id; + form.querySelectorAll( + 
'tr:not(:has(input#edit-title-input, input#edit-baseurl-input))' + ).forEach(el => el.remove()); + document.querySelector('#test-torrent-edit').classList.remove( + 'show-success', 'show-fail' + ) + document.querySelector('#edit-torrent-window > div > p.error') + .classList.add('hidden'); + + fetch(`${url_base}/api/torrentclients/${id}?api_key=${api_key}`) + .then(response => response.json()) + .then(client_data => { + const client_type = client_data.result.type; + form.dataset.type = client_type; + fetch(`${url_base}/api/torrentclients/options?api_key=${api_key}`) + .then(response => response.json()) + .then(options => { + const client_options = options.result[client_type]; + + form.querySelector('#edit-title-input').value = + client_data.result.title || ''; + + form.querySelector('#edit-baseurl-input').value = + client_data.result.base_url; + + if (client_options.includes('username')) { + const username_input = createUsernameInput('edit-username-input'); + username_input.querySelector('input').value = + client_data.result.username || ''; + form.appendChild(username_input); + }; + + if (client_options.includes('password')) { + const password_input = createPasswordInput('edit-password-input'); + password_input.querySelector('input').value = + client_data.result.password || ''; + form.appendChild(password_input); + }; + + if (client_options.includes('api_token')) { + const token_input = createApiTokenInput('edit-token-input'); + token_input.querySelector('input').value = + client_data.result.api_token || ''; + form.appendChild(token_input); + }; + + showWindow('edit-torrent-window'); + }); + }); +}; + +function saveEditTorrent() { + usingApiKey() + .then(api_key => { + testEditTorrent(api_key).then(result => { + if (!result) + return; + + const form = document.querySelector('#edit-torrent-form tbody'); + const id = form.dataset.id; + const data = { + title: form.querySelector('#edit-title-input').value, + base_url: form.querySelector('#edit-baseurl-input').value, + 
username: form.querySelector('#edit-username-input')?.value || null, + password: form.querySelector('#edit-password-input')?.value || null, + api_token: form.querySelector('#edit-token-input')?.value || null + }; + fetch(`${url_base}/api/torrentclients/${id}?api_key=${api_key}`, { + 'method': 'PUT', + 'headers': {'Content-Type': 'application/json'}, + 'body': JSON.stringify(data) + }) + .then(response => { + loadTorrentClients(api_key); + closeWindow(); + }); + }); + }); +}; + +async function testEditTorrent(api_key) { + const form = document.querySelector('#edit-torrent-form tbody'); + const test_button = document.querySelector('#test-torrent-edit'); + test_button.classList.remove('show-success', 'show-fail'); + const data = { + type: form.dataset.type, + base_url: form.querySelector('#edit-baseurl-input').value, + username: form.querySelector('#edit-username-input')?.value || null, + password: form.querySelector('#edit-password-input')?.value || null, + api_token: form.querySelector('#edit-token-input')?.value || null, + }; + return await fetch(`${url_base}/api/torrentclients/test?api_key=${api_key}`, { + 'method': 'POST', + 'headers': {'Content-Type': 'application/json'}, + 'body': JSON.stringify(data) + }) + .then(response => response.json()) + .then(json => { + if (json.result.result) + // Test successful + test_button.classList.add('show-success'); + else + // Test failed + test_button.classList.add('show-fail'); + return json.result.result; + }); +}; + +function deleteTorrent(api_key) { + const id = document.querySelector('#edit-torrent-form tbody').dataset.id; + fetch(`${url_base}/api/torrentclients/${id}?api_key=${api_key}`, { + 'method': 'DELETE' + }) + .then(response => { + if (!response.ok) Promise.reject(response.status); + loadTorrentClients(api_key); + closeWindow(); + }) + .catch(e => { + if (e === 400) { + // Client is downloading + document.querySelector('#edit-torrent-window > div > p.error') + .classList.remove('hidden'); + }; + }); +}; + 
+function loadTorrentList(api_key) { + const table = document.querySelector('#choose-torrent-list'); + table.innerHTML = ''; + + fetch(`${url_base}/api/torrentclients/options?api_key=${api_key}`) + .then(response => response.json()) + .then(json => { + Object.keys(json.result).forEach(c => { + const entry = document.createElement('button'); + entry.innerText = c; + entry.onclick = (e) => loadAddTorrent(api_key, c); + table.appendChild(entry); + }); + showWindow('choose-torrent-window'); + }); +}; + +function loadAddTorrent(api_key, type) { + const form = document.querySelector('#add-torrent-form tbody'); + form.dataset.type = type; + form.querySelectorAll( + 'tr:not(:has(input#add-title-input, input#add-baseurl-input))' + ).forEach(el => el.remove()); + document.querySelector('#test-torrent-add').classList.remove( + 'show-success', 'show-fail' + ) + + fetch(`${url_base}/api/torrentclients/options?api_key=${api_key}`) + .then(response => response.json()) + .then(json => { + const client_options = json.result[type]; + + if (client_options.includes('username')) + form.appendChild(createUsernameInput('add-username-input')); + + if (client_options.includes('password')) + form.appendChild(createPasswordInput('add-password-input')); + + if (client_options.includes('api_token')) + form.appendChild(createApiTokenInput('add-token-input')); + + showWindow('add-torrent-window'); + }); +}; + +function saveAddTorrent() { + usingApiKey() + .then(api_key => { + testAddTorrent(api_key).then(result => { + if (!result) + return; + + const form = document.querySelector('#add-torrent-form tbody'); + const data = { + type: form.dataset.type, + title: form.querySelector('#add-title-input').value, + base_url: form.querySelector('#add-baseurl-input').value, + username: form.querySelector('#add-username-input')?.value || null, + password: form.querySelector('#add-password-input')?.value || null, + api_token: form.querySelector('#add-token-input')?.value || null + }; + 
fetch(`${url_base}/api/torrentclients?api_key=${api_key}`, { + 'method': 'POST', + 'headers': {'Content-Type': 'application/json'}, + 'body': JSON.stringify(data) + }) + .then(response => { + loadTorrentClients(api_key); + closeWindow(); + }); + }); + }); +}; + +async function testAddTorrent(api_key) { + const form = document.querySelector('#add-torrent-form tbody'); + const test_button = document.querySelector('#test-torrent-add'); + test_button.classList.remove('show-success', 'show-fail'); + const data = { + type: form.dataset.type, + base_url: form.querySelector('#add-baseurl-input').value, + username: form.querySelector('#add-username-input')?.value || null, + password: form.querySelector('#add-password-input')?.value || null, + api_token: form.querySelector('#add-token-input')?.value || null, + }; + return await fetch(`${url_base}/api/torrentclients/test?api_key=${api_key}`, { + 'method': 'POST', + 'headers': {'Content-Type': 'application/json'}, + 'body': JSON.stringify(data) + }) + .then(response => response.json()) + .then(json => { + if (json.result.result) + // Test successful + test_button.classList.add('show-success'); + else + // Test failed + test_button.classList.add('show-fail'); + return json.result.result; + }); +}; + +function loadTorrentClients(api_key) { + fetch(`${url_base}/api/torrentclients?api_key=${api_key}`) + .then(response => response.json()) + .then(json => { + const table = document.querySelector('#torrent-client-list'); + document.querySelectorAll('#torrent-client-list > :not(:first-child)') + .forEach(el => el.remove()); + + json.result.forEach(client => { + const entry = document.createElement('button'); + entry.onclick = (e) => loadEditTorrent(api_key, client.id); + entry.type = 'button'; + entry.innerText = client.title; + table.appendChild(entry); + }); + }); +}; + +// code run on load + +usingApiKey() +.then(api_key => { + loadTorrentClients(api_key); + document.querySelector('#delete-torrent-edit').onclick = (e) => 
deleteTorrent(api_key); + document.querySelector('#test-torrent-edit').onclick = (e) => testEditTorrent(api_key); + document.querySelector('#test-torrent-add').onclick = (e) => testAddTorrent(api_key); + document.querySelector('#torrent-client-list > .add-client-button').onclick = (e) => loadTorrentList(api_key); +}); + +document.querySelector('#cancel-torrent-edit').onclick = (e) => closeWindow(); +document.querySelector('#cancel-torrent-add').onclick = (e) => closeWindow(); +document.querySelector('#cancel-torrent-choose').onclick = (e) => closeWindow(); +document.querySelector('#edit-torrent-form').action = 'javascript:saveEditTorrent()'; +document.querySelector('#add-torrent-form').action = 'javascript:saveAddTorrent()'; diff --git a/frontend/templates/settings_download.html b/frontend/templates/settings_download.html index 5781eb9f..0a51a877 100644 --- a/frontend/templates/settings_download.html +++ b/frontend/templates/settings_download.html @@ -33,8 +33,9 @@ Settings Media Management Download + Download Clients General - System + System
@@ -71,6 +72,7 @@

Download Location

Service preference

+

The preference for service to download from when using GetComics as the source.

@@ -94,6 +96,13 @@

Service preference

+ + + +
4 + +

Credentials

diff --git a/frontend/templates/settings_download_clients.html b/frontend/templates/settings_download_clients.html new file mode 100644 index 00000000..664c5374 --- /dev/null +++ b/frontend/templates/settings_download_clients.html @@ -0,0 +1,163 @@ + + + + + + + + + + + + + + + + + + + Download Clients - Kapowarr + + + +
+ +
+
+

Choose Torrent Client

+ +
+
+
+
+
+ +
+
+ +
+
+

Add Torrent Client

+ +
+
+
+ + + + + + + + + +
+ +
+ +

E.g. 'http://192.168.2.15:8008/torrent_client'

+
+
+
+
+ + + +
+
+ +
+
+

Edit Torrent Client

+ +
+
+ +
+ + + + + + + + + +
+ +
+ +

E.g. 'http://192.168.2.15:8008/torrent_client'

+
+
+
+
+ + + + +
+
+
+ +
+ +
+ + + \ No newline at end of file diff --git a/frontend/templates/settings_general.html b/frontend/templates/settings_general.html index bd1d5c96..9669be7a 100644 --- a/frontend/templates/settings_general.html +++ b/frontend/templates/settings_general.html @@ -33,6 +33,7 @@ Settings Media Management Download + Download Clients General System diff --git a/frontend/templates/settings_mediamanagement.html b/frontend/templates/settings_mediamanagement.html index 5632bcb8..902a69a5 100644 --- a/frontend/templates/settings_mediamanagement.html +++ b/frontend/templates/settings_mediamanagement.html @@ -33,6 +33,7 @@ Settings Media Management Download + Download Clients General System diff --git a/frontend/ui.py b/frontend/ui.py index 4a93b3e1..73105cdc 100644 --- a/frontend/ui.py +++ b/frontend/ui.py @@ -60,6 +60,10 @@ def ui_mediamanagement(): def ui_download(): return render_template('settings_download.html', url_base=ui_vars['url_base']) +@ui.route('/settings/downloadclients', methods=methods) +def ui_download_clients(): + return render_template('settings_download_clients.html', url_base=ui_vars['url_base']) + @ui.route('/settings/general', methods=methods) def ui_general(): return render_template('settings_general.html', url_base=ui_vars['url_base'])