diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 1899743b..6fd6563e 100755 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,4 +26,4 @@ Please delete options that are not relevant. - [ ] I have performed a self-review of my own code - [ ] I have commented my code, particularly in hard-to-understand areas - [ ] I have added or updated the docstring for new or existing methods -- [ ] I have added tests when applicable +- [ ] I have modified this PR to merge to the develop branch diff --git a/.github/workflows/dependabot-approve-and-auto-merge.yml b/.github/workflows/dependabot-approve-and-auto-merge.yml index 04d0433e..9c059b76 100644 --- a/.github/workflows/dependabot-approve-and-auto-merge.yml +++ b/.github/workflows/dependabot-approve-and-auto-merge.yml @@ -19,7 +19,7 @@ jobs: # will not occur. - name: Dependabot metadata id: dependabot-metadata - uses: dependabot/fetch-metadata@v1.4.0 + uses: dependabot/fetch-metadata@v1.5.0 with: github-token: "${{ secrets.GITHUB_TOKEN }}" # Here the PR gets approved. 
diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index 6ed64924..a60ab8bd 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -46,6 +46,8 @@ jobs: with: context: ./ file: ./Dockerfile + build-args: | + "BRANCH_NAME=develop" platforms: linux/amd64,linux/arm64,linux/arm/v7 push: true tags: ${{ secrets.DOCKER_HUB_USERNAME }}/qbit_manage:develop diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index 7b6b4b1e..39d7c823 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -14,6 +14,8 @@ jobs: - name: Check Out Repo uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Login to Docker Hub uses: docker/login-action@v2 diff --git a/CHANGELOG b/CHANGELOG index d09f187f..197cd8b7 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,6 +1,20 @@ +# Requirements Updated +- pre-commit updated to 3.3.3 +- requests updated to 2.31.0 +- ruamel.yaml updated to 0.17.26 +- Adds new dependency bencodepy to generate hash for cross-seed +- Adds new dependency GitPython for checking git branches + # Bug Fixes -- Fixes bug in cross_seed (Fixes #270) -- Bug causing RecycleBin not to be created when full path is defined. 
(Fixes #271) -- Fixes Uncaught exception while emptying recycle bin (Fixes #272) +- Changes HardLink Logic (Thanks to @ColinHebert for the suggestion) Fixes #291 +- Additional error checking (Fixes #282) +- Fixes #287 (Thanks to @buthed010203 #290) +- Fixes Remove Orphan crashing when multiprocessing (Thanks to @buthed010203 #289) +- Speed optimization for Remove Orphan (Thanks to @buthed010203 #299) +- Fixes Remove Orphan from crashing in Windows (Fixes #275) +- Fixes #292 +- Fixes #201 +- Fixes #279 +- Updates Dockerfile to debloat and move to Python 3.11 -**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.1...v3.6.2 +**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.2...v3.6.3 diff --git a/Dockerfile b/Dockerfile index 1cabb91c..1c6afc94 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,29 @@ -FROM python:3.10-alpine - -# install packages -RUN apk add --no-cache gcc g++ libxml2-dev libxslt-dev shadow bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates +FROM python:3.11-slim-buster +ARG BRANCH_NAME=master +ENV BRANCH_NAME ${BRANCH_NAME} +ENV TINI_VERSION v0.19.0 +ENV QBM_DOCKER True COPY requirements.txt / -RUN echo "**** install python packages ****" \ +# install packages +RUN echo "**** install system packages ****" \ + && apt-get update \ + && apt-get upgrade -y --no-install-recommends \ + && apt-get install -y tzdata --no-install-recommends \ + && apt-get install -y gcc g++ libxml2-dev libxslt-dev libz-dev bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates \ + && wget -O /tini https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-"$(dpkg --print-architecture | awk -F- '{ print $NF }')" \ + && chmod +x /tini \ && pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \ - && rm -rf /requirements.txt /tmp/* /var/tmp/* + && apt-get --purge autoremove gcc g++ libxml2-dev libxslt-dev libz-dev -y \ + && apt-get clean \ + && apt-get 
update \ + && apt-get check \ + && apt-get -f install \ + && apt-get autoclean \ + && rm -rf /requirements.txt /tmp/* /var/tmp/* /var/lib/apt/lists/* COPY . /app WORKDIR /app VOLUME /config -ENTRYPOINT ["python3", "qbit_manage.py"] +ENTRYPOINT ["/tini", "-s", "python3", "qbit_manage.py", "--"] diff --git a/README.md b/README.md index bf54f823..356ceae9 100755 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ This is a program used to manage your qBittorrent instance such as: * Automatically add [cross-seed](https://github.com/mmgoodnow/cross-seed) torrents in paused state. **\*Note: cross-seed now allows for torrent injections directly to qBit, making this feature obsolete.\*** * Recheck paused torrents sorted by lowest size and resume if completed * Remove orphaned files from your root directory that are not referenced by qBittorrent -* Tag any torrents that have no hard links and allows optional cleanup to delete these torrents and contents based on maximum ratio and/or time seeded +* Tag any torrents that have no hard links outside the root folder and allows optional cleanup to delete these torrents and contents based on maximum ratio and/or time seeded * RecycleBin function to move files into a RecycleBin folder instead of deleting the data directly when deleting a torrent * Built-in scheduler to run the script every x minutes.
(Can use `--run` command to run without the scheduler) * Webhook notifications with [Notifiarr](https://notifiarr.com/) and [Apprise API](https://github.com/caronc/apprise-api) integration diff --git a/VERSION b/VERSION index b7276283..4a788a01 100755 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.6.2 +3.6.3 diff --git a/config/config.yml.sample b/config/config.yml.sample index 68950567..4eca5630 100755 --- a/config/config.yml.sample +++ b/config/config.yml.sample @@ -136,7 +136,7 @@ tracker: tag: other nohardlinks: - # Tag Movies/Series that are not hard linked + # Tag Movies/Series that are not hard linked outside the root directory # Mandatory to fill out directory parameter above to use this function (root_dir/remote_dir) # This variable should be set to your category name of your completed movies/completed series in qbit. Acceptable variable can be any category you would like to tag if there are no hardlinks found movies-completed: diff --git a/modules/__init__.py b/modules/__init__.py index 78f14cd1..c5ce4e3c 100644 --- a/modules/__init__.py +++ b/modules/__init__.py @@ -13,8 +13,10 @@ with open(version_file_path) as f: version_str = f.read().strip() +# Get only the first 3 digits +version_str_split = version_str.rsplit("-", 1)[0] # Convert the version string to a tuple of integers -__version_info__ = tuple(map(int, version_str.split("."))) +__version_info__ = tuple(map(int, version_str_split.split("."))) # Define the version string using the version_info tuple __version__ = ".".join(str(i) for i in __version_info__) diff --git a/modules/config.py b/modules/config.py index f884dc62..b09a0702 100755 --- a/modules/config.py +++ b/modules/config.py @@ -110,27 +110,28 @@ def __init__(self, default_dir, args): self.data["notifiarr"] = self.data.pop("notifiarr") if "webhooks" in self.data: temp = self.data.pop("webhooks") - if "function" not in temp or ("function" in temp and temp["function"] is None): - temp["function"] = {} - - def hooks(attr): - if attr in temp: - 
items = temp.pop(attr) - if items: - temp["function"][attr] = items - if attr not in temp["function"]: - temp["function"][attr] = {} - temp["function"][attr] = None - - hooks("cross_seed") - hooks("recheck") - hooks("cat_update") - hooks("tag_update") - hooks("rem_unregistered") - hooks("rem_orphaned") - hooks("tag_nohardlinks") - hooks("cleanup_dirs") - self.data["webhooks"] = temp + if temp is not None: + if "function" not in temp or ("function" in temp and temp["function"] is None): + temp["function"] = {} + + def hooks(attr): + if attr in temp: + items = temp.pop(attr) + if items: + temp["function"][attr] = items + if attr not in temp["function"]: + temp["function"][attr] = {} + temp["function"][attr] = None + + hooks("cross_seed") + hooks("recheck") + hooks("cat_update") + hooks("tag_update") + hooks("rem_unregistered") + hooks("rem_orphaned") + hooks("tag_nohardlinks") + hooks("cleanup_dirs") + self.data["webhooks"] = temp if "bhd" in self.data: self.data["bhd"] = self.data.pop("bhd") self.dry_run = self.commands["dry_run"] diff --git a/modules/core/cross_seed.py b/modules/core/cross_seed.py index 80e79244..ffbd6152 100644 --- a/modules/core/cross_seed.py +++ b/modules/core/cross_seed.py @@ -2,6 +2,7 @@ from collections import Counter from modules import util +from modules.torrent_hash_generator import TorrentHashGenerator logger = util.logger @@ -39,7 +40,7 @@ def cross_seed(self): dest = os.path.join(self.qbt.torrentinfo[t_name]["save_path"], "") src = os.path.join(dir_cs, file) dir_cs_out = os.path.join(dir_cs, "qbit_manage_added", file) - category = self.qbt.get_category(dest) + category = self.qbt.torrentinfo[t_name].get("Category", self.qbt.get_category(dest)) # Only add cross-seed torrent if original torrent is complete if self.qbt.torrentinfo[t_name]["is_complete"]: categories.append(category) @@ -67,12 +68,28 @@ def cross_seed(self): self.client.torrents.add( torrent_files=src, save_path=dest, category=category, tags="cross-seed", is_paused=True ) - 
util.move_files(src, dir_cs_out) + self.qbt.torrentinfo[t_name]["count"] += 1 + try: + torrent_hash_generator = TorrentHashGenerator(src) + torrent_hash = torrent_hash_generator.generate_torrent_hash() + util.move_files(src, dir_cs_out) + except Exception as e: + logger.warning(f"Unable to generate torrent hash from cross-seed {t_name}: {e}") + try: + if torrent_hash: + torrent_info = self.qbt.get_torrents({"torrent_hashes": torrent_hash}) + except Exception as e: + logger.warning(f"Unable to find hash {torrent_hash} in qbt: {e}") + if torrent_info: + torrent = torrent_info[0] + self.qbt.torrentvalid.append(torrent) + self.qbt.torrentinfo[t_name]["torrents"].append(torrent) + self.qbt.torrent_list.append(torrent) else: logger.print_line(f"Found {t_name} in {dir_cs} but original torrent is not complete.", self.config.loglevel) logger.print_line("Not adding to qBittorrent", self.config.loglevel) else: - error = f"{t_name} not found in torrents. Cross-seed Torrent not added to qBittorrent." + error = f"{t_name} not found in torrents. Cross-seed Torrent not added to qBittorrent."
if self.config.dry_run: logger.print_line(error, self.config.loglevel) else: diff --git a/modules/core/remove_orphaned.py b/modules/core/remove_orphaned.py index 0d6f4a49..93dfb4de 100644 --- a/modules/core/remove_orphaned.py +++ b/modules/core/remove_orphaned.py @@ -1,13 +1,10 @@ import os +from concurrent.futures import ThreadPoolExecutor from fnmatch import fnmatch -from itertools import repeat -from multiprocessing import cpu_count -from multiprocessing import Pool from modules import util logger = util.logger -_config = None class RemoveOrphaned: @@ -21,56 +18,34 @@ def __init__(self, qbit_manager): self.root_dir = qbit_manager.config.root_dir self.orphaned_dir = qbit_manager.config.orphaned_dir - global _config - _config = self.config - self.pool = Pool(processes=max(cpu_count() - 1, 1)) + max_workers = max(os.cpu_count() - 1, 1) + self.executor = ThreadPoolExecutor(max_workers=max_workers) self.rem_orphaned() - self.cleanup_pool() + self.executor.shutdown() def rem_orphaned(self): """Remove orphaned files from remote directory""" self.stats = 0 logger.separator("Checking for Orphaned Files", space=False, border=False) torrent_files = [] - root_files = [] orphaned_files = [] excluded_orphan_files = [] - if self.remote_dir != self.root_dir: - local_orphaned_dir = self.orphaned_dir.replace(self.remote_dir, self.root_dir) - root_files = [ - os.path.join(path.replace(self.remote_dir, self.root_dir), name) - for path, subdirs, files in os.walk(self.remote_dir) - for name in files - if local_orphaned_dir not in path - ] - else: - root_files = [ - os.path.join(path, name) - for path, subdirs, files in os.walk(self.root_dir) - for name in files - if self.orphaned_dir not in path - ] + root_files = self.executor.submit(util.get_root_files, self.remote_dir, self.root_dir, self.orphaned_dir) # Get an updated list of torrents logger.print_line("Locating orphan files", self.config.loglevel) torrent_list = self.qbt.get_torrents({"sort": "added_on"}) - 
torrent_files_and_save_path = [] - for torrent in torrent_list: - torrent_files = [] - for torrent_files_dict in torrent.files: - torrent_files.append(torrent_files_dict.name) - torrent_files_and_save_path.append((torrent_files, torrent.save_path)) + torrent_files.extend( [ fullpath - for fullpathlist in self.pool.starmap(get_full_path_of_torrent_files, torrent_files_and_save_path) + for fullpathlist in self.executor.map(self.get_full_path_of_torrent_files, torrent_list) for fullpath in fullpathlist - if fullpath not in torrent_files ] ) - orphaned_files = set(root_files) - set(torrent_files) + orphaned_files = set(root_files.result()) - set(torrent_files) if self.config.orphaned["exclude_patterns"]: logger.print_line("Processing orphan exclude patterns") @@ -108,30 +83,27 @@ def rem_orphaned(self): self.config.send_notifications(attr) # Delete empty directories after moving orphan files if not self.config.dry_run: - orphaned_parent_path = set(self.pool.map(move_orphan, orphaned_files)) + orphaned_parent_path = set(self.executor.map(self.move_orphan, orphaned_files)) logger.print_line("Removing newly empty directories", self.config.loglevel) - self.pool.starmap(util.remove_empty_directories, zip(orphaned_parent_path, repeat("**/*"))) + self.executor.map(lambda dir: util.remove_empty_directories(dir, "**/*"), orphaned_parent_path) else: logger.print_line("No Orphaned Files found.", self.config.loglevel) - def cleanup_pool(self): - self.pool.close() - self.pool.join() - - -def get_full_path_of_torrent_files(torrent_files, save_path): - fullpath_torrent_files = [] - for file in torrent_files: - fullpath = os.path.join(save_path, file) - # Replace fullpath with \\ if qbm is running in docker (linux) but qbt is on windows - fullpath = fullpath.replace(r"/", "\\") if ":\\" in fullpath else fullpath - fullpath_torrent_files.append(fullpath) - return fullpath_torrent_files - - -def move_orphan(file): - src = file.replace(_config.root_dir, _config.remote_dir) # Could be 
optimized to only run when root != remote - dest = os.path.join(_config.orphaned_dir, file.replace(_config.root_dir, "")) - util.move_files(src, dest, True) - return os.path.dirname(file).replace(_config.root_dir, _config.remote_dir) # Another candidate for micro optimizing + def move_orphan(self, file): + src = file.replace(self.root_dir, self.remote_dir) + dest = os.path.join(self.orphaned_dir, file.replace(self.root_dir, "")) + util.move_files(src, dest, True) + return os.path.dirname(file).replace(self.root_dir, self.remote_dir) + + def get_full_path_of_torrent_files(self, torrent): + torrent_files = map(lambda dict: dict.name, torrent.files) + save_path = torrent.save_path + + fullpath_torrent_files = [] + for file in torrent_files: + fullpath = os.path.join(save_path, file) + # Replace fullpath with \\ if qbm is running in docker (linux) but qbt is on windows + fullpath = fullpath.replace(r"/", "\\") if ":\\" in fullpath else fullpath + fullpath_torrent_files.append(fullpath) + return fullpath_torrent_files diff --git a/modules/core/tag_nohardlinks.py b/modules/core/tag_nohardlinks.py index dbd3c8d3..97d4495c 100644 --- a/modules/core/tag_nohardlinks.py +++ b/modules/core/tag_nohardlinks.py @@ -186,6 +186,7 @@ def tag_nohardlinks(self): """Tag torrents with no hardlinks""" logger.separator("Tagging Torrents with No Hardlinks", space=False, border=False) nohardlinks = self.nohardlinks + check_hardlinks = util.CheckHardLinks(self.root_dir, self.remote_dir) for category in nohardlinks: torrent_list = self.qbt.get_torrents({"category": category, "status_filter": "completed"}) if len(torrent_list) == 0: @@ -199,7 +200,7 @@ def tag_nohardlinks(self): continue for torrent in torrent_list: tracker = self.qbt.get_tags(torrent.trackers) - has_nohardlinks = util.nohardlink( + has_nohardlinks = check_hardlinks.nohardlink( torrent["content_path"].replace(self.root_dir, self.remote_dir), self.config.notify ) if any(tag in torrent.tags for tag in 
nohardlinks[category]["exclude_tags"]): diff --git a/modules/logs.py b/modules/logs.py index f580dfd4..ef007436 100755 --- a/modules/logs.py +++ b/modules/logs.py @@ -17,7 +17,7 @@ DRYRUN = 25 INFO = 20 DEBUG = 10 -TRACE = 5 +TRACE = 0 def fmt_filter(record): @@ -72,17 +72,19 @@ def _get_handler(self, log_file, count=3): """Get handler for log file""" max_bytes = 1024 * 1024 * 2 _handler = RotatingFileHandler(log_file, delay=True, mode="w", maxBytes=max_bytes, backupCount=count, encoding="utf-8") - self._formatter(_handler) + self._formatter(handler=_handler) # if os.path.isfile(log_file): # _handler.doRollover() return _handler - def _formatter(self, handler, border=True): + def _formatter(self, handler=None, border=True, log_only=False, space=False): """Format log message""" - text = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s" - if isinstance(handler, RotatingFileHandler): - text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}" - handler.setFormatter(logging.Formatter(text)) + console = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s" + file = f"{' '*65}" if space else "[%(asctime)s] %(filename)-27s %(levelname)-10s " + handlers = [handler] if handler else self._logger.handlers + for h in handlers: + if not log_only or isinstance(h, RotatingFileHandler): + h.setFormatter(logging.Formatter(f"{file if isinstance(h, RotatingFileHandler) else ''}{console}")) def add_main_handler(self): """Add main handler to logger""" @@ -233,18 +235,15 @@ def insert_space(self, display_title, space_length=0): def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, stacklevel=1): """Log""" + log_only = False if self.spacing > 0: self.exorcise() if "\n" in msg: for i, line in enumerate(msg.split("\n")): self._log(level, line, args, exc_info=exc_info, extra=extra, stack_info=stack_info, stacklevel=stacklevel) if i == 0: - for handler in 
self._logger.handlers: - if isinstance(handler, RotatingFileHandler): - handler.setFormatter(logging.Formatter(" " * 65 + "| %(message)s")) - for handler in self._logger.handlers: - if isinstance(handler, RotatingFileHandler): - handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s")) + self._formatter(log_only=True, space=True) + log_only = True else: for secret in sorted(self.secrets, reverse=True): if secret in msg: @@ -266,6 +265,8 @@ def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, st exc_info = sys.exc_info() record = self._logger.makeRecord(self._logger.name, level, func, lno, msg, args, exc_info, func, extra, sinfo) self._logger.handle(record) + if log_only: + self._formatter() def find_caller(self, stack_info=False, stacklevel=1): """Find caller""" diff --git a/modules/torrent_hash_generator.py b/modules/torrent_hash_generator.py new file mode 100644 index 00000000..69302614 --- /dev/null +++ b/modules/torrent_hash_generator.py @@ -0,0 +1,30 @@ +import hashlib + +import bencodepy + +from modules import util +from modules.util import Failed + +logger = util.logger + + +class TorrentHashGenerator: + def __init__(self, torrent_file_path): + self.torrent_file_path = torrent_file_path + + def generate_torrent_hash(self): + try: + with open(self.torrent_file_path, "rb") as torrent_file: + torrent_data = torrent_file.read() + try: + torrent_info = bencodepy.decode(torrent_data) + info_data = bencodepy.encode(torrent_info[b"info"]) + info_hash = hashlib.sha1(info_data).hexdigest() + logger.trace(f"info_hash: {info_hash}") + return info_hash + except KeyError: + logger.error("Invalid .torrent file format. 
'info' key not found.") + except FileNotFoundError: + logger.error(f"Torrent file '{self.torrent_file_path}' not found.") + except Failed as err: + logger.error(f"TorrentHashGenerator Error: {err}") diff --git a/modules/util.py b/modules/util.py index 46dfe235..c4c5304c 100755 --- a/modules/util.py +++ b/modules/util.py @@ -7,6 +7,7 @@ import time from pathlib import Path +import requests import ruamel.yaml logger = logging.getLogger("qBit Manage") @@ -71,6 +72,64 @@ class TorrentMessages: ] +def guess_branch(version, env_version, git_branch): + if git_branch: + return git_branch + elif env_version == "develop": + return env_version + elif version[2] > 0: + dev_version = get_develop() + if version[1] != dev_version[1] or version[2] <= dev_version[2]: + return "develop" + else: + return "master" + + +def current_version(version, branch=None): + if branch == "develop": + return get_develop() + elif version[2] > 0: + new_version = get_develop() + if version[1] != new_version[1] or new_version[2] >= version[2]: + return new_version + else: + return get_master() + + +develop_version = None + + +def get_develop(): + global develop_version + if develop_version is None: + develop_version = get_version("develop") + return develop_version + + +master_version = None + + +def get_master(): + global master_version + if master_version is None: + master_version = get_version("master") + return master_version + + +def get_version(level): + try: + url = f"https://raw.githubusercontent.com/StuffAnThings/qbit_manage/{level}/VERSION" + return parse_version(requests.get(url).content.decode().strip(), text=level) + except requests.exceptions.ConnectionError: + return "Unknown", "Unknown", 0 + + +def parse_version(version, text="develop"): + version = version.replace("develop", text) + split_version = version.split(f"-{text}") + return version, split_version[0], int(split_version[1]) if len(split_version) > 1 else 0 + + class check: """Check for attributes in config.""" @@ -307,59 
+366,118 @@ def copy_files(src, dest): def remove_empty_directories(pathlib_root_dir, pattern): """Remove empty directories recursively.""" pathlib_root_dir = Path(pathlib_root_dir) - # list all directories recursively and sort them by path, - # longest first - longest = sorted( - pathlib_root_dir.glob(pattern), - key=lambda p: len(str(p)), - reverse=True, - ) - longest.append(pathlib_root_dir) - for pdir in longest: - try: - pdir.rmdir() # remove directory if empty - except OSError: - continue # catch and continue if non-empty + try: + # list all directories recursively and sort them by path, + # longest first + longest = sorted( + pathlib_root_dir.glob(pattern), + key=lambda p: len(str(p)), + reverse=True, + ) + longest.append(pathlib_root_dir) # delete the folder itself if it's empty + for pdir in longest: + try: + pdir.rmdir() # remove directory if empty + except (FileNotFoundError, OSError): + continue # catch and continue if non-empty, folders within could already be deleted if run in parallel + except FileNotFoundError: + pass # if this is being run in parallel, pathlib_root_dir could already be deleted -def nohardlink(file, notify): +class CheckHardLinks: """ - Check if there are any hard links - Will check if there are any hard links if it passes a file or folder - If a folder is passed, it will take the largest file in that folder and only check for hardlinks - of the remaining files where the file is greater size a percentage of the largest file - This fixes the bug in #192 + Class to check for hardlinks """ - check_for_hl = True - if os.path.isfile(file): - logger.trace(f"Checking file: {file}") - if os.stat(file).st_nlink > 1: - check_for_hl = False - else: - sorted_files = sorted(Path(file).rglob("*"), key=lambda x: os.stat(x).st_size, reverse=True) - logger.trace(f"Folder: {file}") - logger.trace(f"Files Sorted by size: {sorted_files}") - threshold = 0.5 - if not sorted_files: - msg = ( - f"Nohardlink Error: Unable to open the folder {file}. 
" - "Please make sure folder exists and qbit_manage has access to this directory." - ) - notify(msg, "nohardlink") - logger.warning(msg) - else: - largest_file_size = os.stat(sorted_files[0]).st_size - logger.trace(f"Largest file: {sorted_files[0]}") - logger.trace(f"Largest file size: {largest_file_size}") - for files in sorted_files: - file_size = os.stat(files).st_size - file_no_hardlinks = os.stat(files).st_nlink + + def __init__(self, root_dir, remote_dir): + self.root_dir = root_dir + self.remote_dir = remote_dir + self.root_files = set(get_root_files(self.root_dir, self.remote_dir)) + self.get_inode_count() + + def get_inode_count(self): + self.inode_count = {} + for file in self.root_files: + inode_no = os.stat(file.replace(self.root_dir, self.remote_dir)).st_ino + if inode_no in self.inode_count: + self.inode_count[inode_no] += 1 + else: + self.inode_count[inode_no] = 1 + + def nohardlink(self, file, notify): + """ + Check if there are any hard links + Will check if there are any hard links if it passes a file or folder + If a folder is passed, it will take the largest file in that folder and only check for hardlinks + of the remaining files where the file is greater size a percentage of the largest file + This fixes the bug in #192 + """ + check_for_hl = True + try: + if os.path.isfile(file): + if os.path.islink(file): + logger.warning(f"Symlink found in {file}, unable to determine hardlinks. 
Skipping...") + return False logger.trace(f"Checking file: {file}") - logger.trace(f"Checking file size: {file_size}") - logger.trace(f"Checking no of hard links: {file_no_hardlinks}") - if file_no_hardlinks > 1 and file_size >= (largest_file_size * threshold): + logger.trace(f"Checking file inum: {os.stat(file).st_ino}") + logger.trace(f"Checking no of hard links: {os.stat(file).st_nlink}") + logger.trace(f"Checking inode_count dict: {self.inode_count.get(os.stat(file).st_ino)}") + # https://github.com/StuffAnThings/qbit_manage/issues/291 for more details + if os.stat(file).st_nlink - self.inode_count.get(os.stat(file).st_ino, 1) > 0: check_for_hl = False - return check_for_hl + else: + sorted_files = sorted(Path(file).rglob("*"), key=lambda x: os.stat(x).st_size, reverse=True) + logger.trace(f"Folder: {file}") + logger.trace(f"Files Sorted by size: {sorted_files}") + threshold = 0.5 + if not sorted_files: + msg = ( + f"Nohardlink Error: Unable to open the folder {file}. " + "Please make sure folder exists and qbit_manage has access to this directory." + ) + notify(msg, "nohardlink") + logger.warning(msg) + else: + largest_file_size = os.stat(sorted_files[0]).st_size + logger.trace(f"Largest file: {sorted_files[0]}") + logger.trace(f"Largest file size: {largest_file_size}") + for files in sorted_files: + if os.path.islink(files): + logger.warning(f"Symlink found in {files}, unable to determine hardlinks. 
Skipping...") + continue + file_size = os.stat(files).st_size + file_no_hardlinks = os.stat(files).st_nlink + logger.trace(f"Checking file: {file}") + logger.trace(f"Checking file inum: {os.stat(file).st_ino}") + logger.trace(f"Checking file size: {file_size}") + logger.trace(f"Checking no of hard links: {file_no_hardlinks}") + logger.trace(f"Checking inode_count dict: {self.inode_count.get(os.stat(file).st_ino)}") + if file_no_hardlinks - self.inode_count.get(os.stat(file).st_ino, 1) > 0 and file_size >= ( + largest_file_size * threshold + ): + check_for_hl = False + except PermissionError as perm: + logger.warning(f"{perm} : file {file} has permission issues. Skipping...") + return False + except FileNotFoundError as file_not_found_error: + logger.warning(f"{file_not_found_error} : File {file} not found. Skipping...") + return False + except Exception as ex: + logger.stacktrace() + logger.error(ex) + return False + return check_for_hl + + +def get_root_files(root_dir, remote_dir, exclude_dir=None): + local_exclude_dir = exclude_dir.replace(remote_dir, root_dir) if exclude_dir and remote_dir != root_dir else exclude_dir + root_files = [ + os.path.join(path.replace(remote_dir, root_dir) if remote_dir != root_dir else path, name) + for path, subdirs, files in os.walk(remote_dir if remote_dir != root_dir else root_dir) + for name in files + if not local_exclude_dir or local_exclude_dir not in path + ] + return root_files def load_json(file): diff --git a/qbit_manage.py b/qbit_manage.py index 0ca6f61b..32106c91 100755 --- a/qbit_manage.py +++ b/qbit_manage.py @@ -3,6 +3,7 @@ import argparse import glob import os +import platform import sys import time from datetime import datetime @@ -166,16 +167,23 @@ args = parser.parse_args() +static_envs = [] +test_value = None + + def get_arg(env_str, default, arg_bool=False, arg_int=False): - """Get argument from environment variable or command line argument.""" + global test_value env_vars = [env_str] if not isinstance(env_str, 
list) else env_str final_value = None + static_envs.extend(env_vars) for env_var in env_vars: env_value = os.environ.get(env_var) + if env_var == "BRANCH_NAME": + test_value = env_value if env_value is not None: final_value = env_value break - if final_value is not None: + if final_value or (arg_int and final_value == 0): if arg_bool: if final_value is True or final_value is False: return final_value @@ -184,13 +192,28 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False): else: return False elif arg_int: - return int(final_value) + try: + return int(final_value) + except ValueError: + return default else: return str(final_value) else: return default +try: + from git import Repo, InvalidGitRepositoryError + + try: + git_branch = Repo(path=".").head.ref.name # noqa + except InvalidGitRepositoryError: + git_branch = None +except ImportError: + git_branch = None + +env_version = get_arg("BRANCH_NAME", "master") +is_docker = get_arg("QBM_DOCKER", False, arg_bool=True) run = get_arg("QBT_RUN", args.run, arg_bool=True) sch = get_arg("QBT_SCHEDULE", args.min) startupDelay = get_arg("QBT_STARTUP_DELAY", args.startupDelay) @@ -306,13 +329,15 @@ def my_except_hook(exctype, value, tbi): sys.excepthook = my_except_hook -version = "Unknown" +version = ("Unknown", "Unknown", 0) with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle: for line in handle.readlines(): line = line.strip() if len(line) > 0: - version = line + version = util.parse_version(line) break +branch = util.guess_branch(version, env_version, git_branch) +version = (version[0].replace("develop", branch), version[1].replace("develop", branch), version[2]) def start_loop(): @@ -377,16 +402,12 @@ def finished_run(): try: cfg = Config(default_dir, args) qbit_manager = cfg.qbt - except Exception as ex: - if "Qbittorrent Error" in ex.args[0]: - logger.print_line(ex, "CRITICAL") - logger.print_line("Exiting scheduled Run.", "CRITICAL") - finished_run() - return None - else: - 
logger.stacktrace() - logger.print_line(ex, "CRITICAL") + logger.stacktrace() + logger.print_line(ex, "CRITICAL") + logger.print_line("Exiting scheduled Run.", "CRITICAL") + finished_run() + return None if qbit_manager: # Set Category @@ -397,6 +418,12 @@ def finished_run(): if cfg.commands["tag_update"]: stats["tagged"] += Tags(qbit_manager).stats + # Set Cross Seed + if cfg.commands["cross_seed"]: + cross_seed = CrossSeed(qbit_manager) + stats["added"] += cross_seed.stats_added + stats["tagged"] += cross_seed.stats_tagged + # Remove Unregistered Torrents and tag errors if cfg.commands["rem_unregistered"] or cfg.commands["tag_tracker_error"]: rem_unreg = RemoveUnregistered(qbit_manager) @@ -407,12 +434,6 @@ def finished_run(): stats["untagged_tracker_error"] += rem_unreg.stats_untagged stats["tagged"] += rem_unreg.stats_tagged - # Set Cross Seed - if cfg.commands["cross_seed"]: - cross_seed = CrossSeed(qbit_manager) - stats["added"] += cross_seed.stats_added - stats["tagged"] += cross_seed.stats_tagged - # Recheck Torrents if cfg.commands["recheck"]: recheck = ReCheck(qbit_manager) @@ -525,8 +546,17 @@ def calc_next_run(schd, write_out=False): logger.info_center(r" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|") # noqa: W605 logger.info_center(" | | ______ __/ | ") # noqa: W605 logger.info_center(" |_| |______| |___/ ") # noqa: W605 - logger.info(f" Version: {version}") - + system_ver = "Docker" if is_docker else f"Python {platform.python_version()}" + logger.info(f" Version: {version[0]} ({system_ver}){f' (Git: {git_branch})' if git_branch else ''}") + latest_version = util.current_version(version, branch=branch) + new_version = ( + latest_version[0] + if latest_version and (version[1] != latest_version[1] or (version[2] and version[2] < latest_version[2])) + else None + ) + if new_version: + logger.info(f" Newest Version: {new_version}") + logger.info(f" Platform: {platform.platform()}") logger.separator(loglevel="DEBUG") logger.debug(f" --run 
(QBT_RUN): {run}") logger.debug(f" --schedule (QBT_SCHEDULE): {sch}") diff --git a/requirements.txt b/requirements.txt index 7e5d5dc7..17d25111 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,9 @@ +bencodepy==0.9.5 flake8==6.0.0 -pre-commit==3.2.2 +GitPython==3.1.31 +pre-commit==3.3.2 qbittorrent-api==2023.4.47 -requests==2.28.2 +requests==2.31.0 retrying==1.3.4 -ruamel.yaml==0.17.21 +ruamel.yaml==0.17.26 schedule==1.2.0