diff --git a/.gitignore b/.gitignore
index b651b326b..86facd3c1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ data*
*.pickle
authorized_chats.txt
log.txt
+accounts/*
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..ea40a2d28
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "vendor/cmrudl.py"]
+ path = vendor/cmrudl.py
+ url = https://github.com/JrMasterModelBuilder/cmrudl.py.git
diff --git a/Dockerfile b/Dockerfile
index 0b26e2d97..5fb7f5a77 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,15 +2,18 @@ FROM ubuntu:18.04
WORKDIR /usr/src/app
RUN chmod 777 /usr/src/app
-RUN apt -qq update
-RUN apt -qq install -y aria2 python3 python3-pip locales
+RUN apt-get -qq update
+RUN apt-get -qq install -y aria2 python3 python3-pip \
+ locales python3-lxml \
+ curl pv jq ffmpeg
COPY requirements.txt .
RUN pip3 install --no-cache-dir -r requirements.txt
-COPY . .
-RUN chmod +x aria.sh
RUN locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
+COPY . .
+COPY netrc /root/.netrc
+RUN chmod +x aria.sh
CMD ["bash","start.sh"]
diff --git a/README.md b/README.md
index d92f9c4f0..8e391961f 100644
--- a/README.md
+++ b/README.md
@@ -12,9 +12,11 @@ This project is heavily inspired from @out386 's telegram bot which is written i
- Docker support
- Uploading To Team Drives.
- Index Link support
+- Service account support
+- Mirror all youtube-dl supported links
+- Mirror telegram files
# Upcoming features (TODOs):
-- Mirror from Telegram files
# How to deploy?
Deploying is pretty much straight forward and is divided into several steps as follows:
@@ -46,25 +48,33 @@ cp config_sample.env config.env
_____REMOVE_THIS_LINE_____=True
```
Fill up rest of the fields. Meaning of each fields are discussed below:
-- BOT_TOKEN : The telegram bot token that you get from @BotFather
-- GDRIVE_FOLDER_ID : This is the folder ID of the Google Drive Folder to which you want to upload all the mirrors.
-- DOWNLOAD_DIR : The path to the local folder where the downloads should be downloaded to
-- DOWNLOAD_STATUS_UPDATE_INTERVAL : A short interval of time in seconds after which the Mirror progress message is updated. (I recommend to keep it 5 seconds at least)
-- OWNER_ID : The Telegram user ID (not username) of the owner of the bot
-- AUTO_DELETE_MESSAGE_DURATION : Interval of time (in seconds), after which the bot deletes it's message (and command message) which is expected to be viewed instantly. Note: Set to -1 to never automatically delete messages
-- IS_TEAM_DRIVE : (Optional field) Set to "True" if GDRIVE_FOLDER_ID is from a Team Drive else False or Leave it empty.
-- INDEX_URL : (Optional field) Refer to https://github.com/maple3142/GDIndex/ The URL should not have any trailing '/'
-
+- **BOT_TOKEN** : The telegram bot token that you get from @BotFather
+- **GDRIVE_FOLDER_ID** : This is the folder ID of the Google Drive Folder to which you want to upload all the mirrors.
+- **DOWNLOAD_DIR** : The path to the local folder where the downloads should be downloaded to
+- **DOWNLOAD_STATUS_UPDATE_INTERVAL** : A short interval of time in seconds after which the Mirror progress message is updated. (I recommend to keep it 5 seconds at least)
+- **OWNER_ID** : The Telegram user ID (not username) of the owner of the bot
+- **AUTO_DELETE_MESSAGE_DURATION** : Interval of time (in seconds), after which the bot deletes its message (and command message) which is expected to be viewed instantly. Note: Set to -1 to never automatically delete messages
+- **IS_TEAM_DRIVE** : (Optional field) Set to "True" if GDRIVE_FOLDER_ID is from a Team Drive else False or Leave it empty.
+- **USE_SERVICE_ACCOUNTS**: (Optional field) (Leave empty if unsure) Whether to use service accounts or not. For this to work see "Using service accounts" section below.
+- **INDEX_URL** : (Optional field) Refer to https://github.com/maple3142/GDIndex/ The URL should not have any trailing '/'
+- **TELEGRAM_API** : This is to authenticate to your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org. DO NOT put this in quotes.
+- **TELEGRAM_HASH** : This is to authenticate to your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org
+- **USER_SESSION_STRING** : Session string generated by running:
+```
+python3 generate_string_session.py
+```
Note: You can limit maximum concurrent downloads by changing the value of MAX_CONCURRENT_DOWNLOADS in aria.sh. By default, it's set to 2
## Getting Google OAuth API credential file
-- Visit the Google Cloud Console
+- Visit the [Google Cloud Console](https://console.developers.google.com/apis/credentials)
- Go to the OAuth Consent tab, fill it, and save.
- Go to the Credentials tab and click Create Credentials -> OAuth Client ID
- Choose Other and Create.
- Use the download button to download your credentials.
- Move that file to the root of mirror-bot, and rename it to credentials.json
+- Visit [Google API page](https://console.developers.google.com/apis/library)
+- Search for Drive and enable it if it is disabled
- Finally, run the script to generate token file (token.pickle) for Google Drive:
```
pip install google-api-python-client google-auth-httplib2 google-auth-oauthlib
@@ -84,3 +94,38 @@ sudo docker build . -t mirror-bot
```
sudo docker run mirror-bot
```
+
+# Using service accounts for uploading to avoid user rate limit
+For Service Account to work, you must set USE_SERVICE_ACCOUNTS="True" in config file or environment variables
+Many thanks to [AutoRClone](https://github.com/xyou365/AutoRclone) for the scripts
+## Generating service accounts
+Step 1. Generate service accounts [What is service account](https://cloud.google.com/iam/docs/service-accounts)
+---------------------------------
+Let us create only the service accounts that we need.
+**Warning:** abuse of this feature is not the aim of AutoRclone and we do **NOT** recommend that you make a lot of projects; just one project and 100 service accounts allow you plenty of use. It's also possible that overabuse might get your projects banned by Google.
+
+```
+Note: 1 service account can copy around 750 GB a day, and 1 project makes 100 service accounts, so that's 75 TB a day; for most users this should easily suffice.
+```
+
+`python3 gen_sa_accounts.py --quick-setup 1 --new-only`
+
+A folder named accounts will be created which will contain keys for the service accounts created
+
+NOTE: If you have created SAs in past from this script, you can also just re download the keys by running:
+```
+python3 gen_sa_accounts.py --download-keys project_id
+```
+
+### Add all the service accounts to the Team Drive or folder
+- Run:
+```
+python3 add_to_team_drive.py -d SharedTeamDriveSrcID
+```
+
+# Youtube-dl authentication using .netrc file
+For using your premium accounts in youtube-dl, edit the netrc file (in the root directory of this repository) according to following format:
+```
+machine host login username password my_youtube_password
+```
+where host is the name of extractor (eg. youtube, twitch). Multiple accounts of different hosts can be added each separated by a new line
\ No newline at end of file
diff --git a/add_to_team_drive.py b/add_to_team_drive.py
new file mode 100644
index 000000000..222cbe1b1
--- /dev/null
+++ b/add_to_team_drive.py
@@ -0,0 +1,77 @@
+from __future__ import print_function
+from google.oauth2.service_account import Credentials
+import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
+from google_auth_oauthlib.flow import InstalledAppFlow
+from google.auth.transport.requests import Request
+import os, pickle
+
+stt = time.time()
+
+parse = argparse.ArgumentParser(
+ description='A tool to add service accounts to a shared drive from a folder containing credential files.')
+parse.add_argument('--path', '-p', default='accounts',
+ help='Specify an alternative path to the service accounts folder.')
+parse.add_argument('--credentials', '-c', default='./credentials.json',
+ help='Specify the relative path for the credentials file.')
+parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
+parsereq = parse.add_argument_group('required arguments')
+parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
+
+args = parse.parse_args()
+acc_dir = args.path
+did = args.drive_id
+credentials = glob.glob(args.credentials)
+
+try:
+ open(credentials[0], 'r')
+ print('>> Found credentials.')
+except IndexError:
+ print('>> No credentials found.')
+ sys.exit(0)
+
+if not args.yes:
+ # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
+ # credentials[0],'r').read()))['installed']['client_id'])
+ input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
+ '(shared drive) as Manager\n>> (Press any key to continue)')
+
+creds = None
+if os.path.exists('token_sa.pickle'):
+ with open('token_sa.pickle', 'rb') as token:
+ creds = pickle.load(token)
+# If there are no (valid) credentials available, let the user log in.
+if not creds or not creds.valid:
+ if creds and creds.expired and creds.refresh_token:
+ creds.refresh(Request())
+ else:
+ flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
+ 'https://www.googleapis.com/auth/admin.directory.group',
+ 'https://www.googleapis.com/auth/admin.directory.group.member'
+ ])
+ # creds = flow.run_local_server(port=0)
+ creds = flow.run_console()
+ # Save the credentials for the next run
+ with open('token_sa.pickle', 'wb') as token:
+ pickle.dump(creds, token)
+
+drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
+batch = drive.new_batch_http_request()
+
+aa = glob.glob('%s/*.json' % acc_dir)
+pbar = progress.bar.Bar("Readying accounts", max=len(aa))
+for i in aa:
+ ce = json.loads(open(i, 'r').read())['client_email']
+ batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
+ "role": "fileOrganizer",
+ "type": "user",
+ "emailAddress": ce
+ }))
+ pbar.next()
+pbar.finish()
+print('Adding...')
+batch.execute()
+
+print('Complete.')
+hours, rem = divmod((time.time() - stt), 3600)
+minutes, sec = divmod(rem, 60)
+print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
\ No newline at end of file
diff --git a/bot/__init__.py b/bot/__init__.py
index 32565c413..1f5cf4038 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -1,11 +1,15 @@
import logging
-import aria2p
-import threading
import os
-from dotenv import load_dotenv
-import telegram.ext as tg
+import threading
import time
+import aria2p
+import telegram.ext as tg
+from dotenv import load_dotenv
+import socket
+
+socket.setdefaulttimeout(600)
+
botStartTime = time.time()
if os.path.exists('log.txt'):
with open('log.txt', 'r+') as f:
@@ -69,6 +73,9 @@ def getConfig(name: str):
DOWNLOAD_STATUS_UPDATE_INTERVAL = int(getConfig('DOWNLOAD_STATUS_UPDATE_INTERVAL'))
OWNER_ID = int(getConfig('OWNER_ID'))
AUTO_DELETE_MESSAGE_DURATION = int(getConfig('AUTO_DELETE_MESSAGE_DURATION'))
+ USER_SESSION_STRING = getConfig('USER_SESSION_STRING')
+ TELEGRAM_API = getConfig('TELEGRAM_API')
+ TELEGRAM_HASH = getConfig('TELEGRAM_HASH')
except KeyError as e:
LOGGER.error("One or more env variables missing! Exiting now")
exit(1)
@@ -80,13 +87,22 @@ def getConfig(name: str):
INDEX_URL = None
try:
IS_TEAM_DRIVE = getConfig('IS_TEAM_DRIVE')
- if IS_TEAM_DRIVE == 'True' or IS_TEAM_DRIVE == 'true':
+ if IS_TEAM_DRIVE.lower() == 'true':
IS_TEAM_DRIVE = True
else:
IS_TEAM_DRIVE = False
-
except KeyError:
IS_TEAM_DRIVE = False
-updater = tg.Updater(token=BOT_TOKEN)
+
+try:
+ USE_SERVICE_ACCOUNTS = getConfig('USE_SERVICE_ACCOUNTS')
+ if USE_SERVICE_ACCOUNTS.lower() == 'true':
+ USE_SERVICE_ACCOUNTS = True
+ else:
+ USE_SERVICE_ACCOUNTS = False
+except KeyError:
+ USE_SERVICE_ACCOUNTS = False
+
+updater = tg.Updater(token=BOT_TOKEN,use_context=True)
bot = updater.bot
dispatcher = updater.dispatcher
diff --git a/bot/__main__.py b/bot/__main__.py
index 0c0d2d692..4140a3b33 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,17 +1,22 @@
+import shutil
+import signal
+import pickle
+
+from os import execl, path, remove
+from sys import executable
+
from telegram.ext import CommandHandler, run_async
-from bot import dispatcher, LOGGER, updater, botStartTime
+from bot import dispatcher, updater, botStartTime
from bot.helper.ext_utils import fs_utils
-from .helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
-import signal
-import time
+from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.message_utils import *
-import shutil
+from .helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
from .helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.bot_commands import BotCommands
-from .modules import authorize, list, cancel_mirror, mirror_status, mirror
+from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch
+
@run_async
-def stats(bot,update):
+def stats(update, context):
currentTime = get_readable_time((time.time() - botStartTime))
total, used, free = shutil.disk_usage('.')
total = get_readable_file_size(total)
@@ -19,33 +24,42 @@ def stats(bot,update):
free = get_readable_file_size(free)
stats = f'Bot Uptime: {currentTime}\n' \
f'Total disk space: {total}\n' \
- f'Used: {used}\n' \
- f'Free: {free}'
- sendMessage(stats, bot, update)
-
+ f'Used: {used}\n' \
+ f'Free: {free}'
+ sendMessage(stats, context.bot, update)
@run_async
-def start(bot,update):
+def start(update, context):
sendMessage("This is a bot which can mirror all your links to Google drive!\n"
- "Type /help to get a list of available commands", bot, update)
+ "Type /help to get a list of available commands", context.bot, update)
@run_async
-def ping(bot,update):
+def restart(update, context):
+ restart_message = sendMessage("Restarting, Please wait!", context.bot, update)
+ # Save restart message object in order to reply to it after restarting
+ fs_utils.clean_all()
+ with open('restart.pickle', 'wb') as status:
+ pickle.dump(restart_message, status)
+ execl(executable, executable, "-m", "bot")
+
+
+@run_async
+def ping(update, context):
start_time = int(round(time.time() * 1000))
- reply = sendMessage("Starting Ping", bot, update)
- end_time = int(round(time.time()*1000))
- editMessage(f'{end_time - start_time} ms',reply)
+ reply = sendMessage("Starting Ping", context.bot, update)
+ end_time = int(round(time.time() * 1000))
+ editMessage(f'{end_time - start_time} ms', reply)
@run_async
-def log(bot,update):
- sendLogFile(bot, update)
+def log(update, context):
+ sendLogFile(context.bot, update)
@run_async
-def bot_help(bot,update):
+def bot_help(update, context):
help_string = f'''
/{BotCommands.HelpCommand}: To get this message
@@ -53,6 +67,10 @@ def bot_help(bot,update):
/{BotCommands.TarMirrorCommand} [download_url][magnet_link]: start mirroring and upload the archived (.tar) version of the download
+/{BotCommands.WatchCommand} [youtube-dl supported link]: Mirror through youtube-dl
+
+/{BotCommands.TarWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and tar before uploading
+
/{BotCommands.CancelMirror} : Reply to the message by which the download was initiated and that download will be cancelled
/{BotCommands.StatusCommand}: Shows a status of all the downloads
@@ -66,22 +84,32 @@ def bot_help(bot,update):
/{BotCommands.LogCommand}: Get a log file of the bot. Handy for getting crash reports
'''
- sendMessage(help_string, bot, update)
+ sendMessage(help_string, context.bot, update)
def main():
fs_utils.start_cleanup()
+ # Check if the bot is restarting
+ if path.exists('restart.pickle'):
+ with open('restart.pickle', 'rb') as status:
+ restart_message = pickle.load(status)
+ restart_message.edit_text("Restarted Successfully!")
+ remove('restart.pickle')
+
start_handler = CommandHandler(BotCommands.StartCommand, start,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
ping_handler = CommandHandler(BotCommands.PingCommand, ping,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+ restart_handler = CommandHandler(BotCommands.RestartCommand, restart,
+ filters=CustomFilters.owner_filter)
help_handler = CommandHandler(BotCommands.HelpCommand,
bot_help, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
stats_handler = CommandHandler(BotCommands.StatsCommand,
- stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+ stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
log_handler = CommandHandler(BotCommands.LogCommand, log, filters=CustomFilters.owner_filter)
dispatcher.add_handler(start_handler)
dispatcher.add_handler(ping_handler)
+ dispatcher.add_handler(restart_handler)
dispatcher.add_handler(help_handler)
dispatcher.add_handler(stats_handler)
dispatcher.add_handler(log_handler)
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index ea8f598ad..8d0e6ce8e 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -1,9 +1,10 @@
-from bot import download_dict, download_dict_lock
import logging
import re
import threading
import time
+from bot import download_dict, download_dict_lock
+
LOGGER = logging.getLogger(__name__)
MAGNET_REGEX = r"magnet:\?xt=urn:btih:[a-zA-Z0-9]*"
@@ -45,6 +46,8 @@ def cancel(self):
def get_readable_file_size(size_in_bytes) -> str:
+ if size_in_bytes is None:
+ return '0B'
index = 0
while size_in_bytes >= 1024:
size_in_bytes /= 1024
@@ -55,6 +58,15 @@ def get_readable_file_size(size_in_bytes) -> str:
return 'File too large'
+def getDownloadByGid(gid):
+ with download_dict_lock:
+ for dl in download_dict.values():
+ if dl.status() == MirrorStatus.STATUS_DOWNLOADING or dl.status() == MirrorStatus.STATUS_WAITING:
+ if dl.gid() == gid:
+ return dl
+ return None
+
+
def get_progress_bar_string(status):
completed = status.processed_bytes() / 8
total = status.size_raw() / 8
@@ -73,14 +85,6 @@ def get_progress_bar_string(status):
return p_str
-def get_download_index(_list, gid):
- index = 0
- for i in _list:
- if i.download().gid == gid:
- return index
- index += 1
-
-
def get_readable_message():
with download_dict_lock:
msg = ""
@@ -89,12 +93,13 @@ def get_readable_message():
msg += download.status()
if download.status() != MirrorStatus.STATUS_ARCHIVING:
             msg += f"\n{get_progress_bar_string(download)} {download.progress()} of " \
- f"{download.size()}" \
- f" at {download.speed()}, ETA: {download.eta()} "
+ f"{download.size()}" \
+ f" at {download.speed()}, ETA: {download.eta()} "
if download.status() == MirrorStatus.STATUS_DOWNLOADING:
if hasattr(download, 'is_torrent'):
- msg += f"| P: {download.download().connections} " \
- f"| S: {download.download().num_seeders}"
+ msg += f"| P: {download.aria_download().connections} " \
+ f"| S: {download.aria_download().num_seeders}"
+            msg += f"\nGID: {download.gid()}\n"
msg += "\n\n"
return msg
diff --git a/bot/helper/ext_utils/exceptions.py b/bot/helper/ext_utils/exceptions.py
index 0181b5199..25ff87fa6 100644
--- a/bot/helper/ext_utils/exceptions.py
+++ b/bot/helper/ext_utils/exceptions.py
@@ -1,17 +1,2 @@
-class DriveAuthError(Exception):
+class DirectDownloadLinkException(Exception):
pass
-
-
-class MessageDeletedError(Exception):
- """ Custom Exception class for killing thread as soon as they aren't needed"""
-
- def __init__(self, message, error=None):
- super().__init__(message)
- self.error = error
-
-
-class DownloadCancelled(Exception):
-
- def __init__(self, message, error=None):
- super().__init__(message)
- self.error = error
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 1d0a7ed9b..0b92aa0d8 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -4,6 +4,7 @@
import os
import pathlib
import magic
+import tarfile
def clean_download(path: str):
@@ -19,23 +20,40 @@ def start_cleanup():
pass
+def clean_all():
+ aria2.remove_all(True)
+ shutil.rmtree(DOWNLOAD_DIR)
+
+
def exit_clean_up(signal, frame):
try:
LOGGER.info("Please wait, while we clean up the downloads and stop running downloads")
- aria2.remove_all(True)
- shutil.rmtree(DOWNLOAD_DIR)
+ clean_all()
sys.exit(0)
except KeyboardInterrupt:
LOGGER.warning("Force Exiting before the cleanup finishes!")
sys.exit(1)
-def tar(orig_path: str):
- path = pathlib.PurePath(orig_path)
- base = path.name
- root = pathlib.Path(path.parent.as_posix()).absolute().as_posix()
- LOGGER.info(f'Tar: orig_path: {orig_path}, base: {base}, root: {root}')
- return shutil.make_archive(orig_path, 'tar', root, base)
+def get_path_size(path):
+ if os.path.isfile(path):
+ return os.path.getsize(path)
+ total_size = 0
+ for root, dirs, files in os.walk(path):
+ for f in files:
+ abs_path = os.path.join(root, f)
+ total_size += os.path.getsize(abs_path)
+ return total_size
+
+
+def tar(org_path):
+ tar_path = org_path + ".tar"
+ path = pathlib.PurePath(org_path)
+ LOGGER.info(f'Tar: orig_path: {org_path}, tar_path: {tar_path}')
+ tar = tarfile.open(tar_path, "w")
+ tar.add(org_path, arcname=path.name)
+ tar.close()
+ return tar_path
def get_mime_type(file_path):
diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py
index ea449b778..22c403ca1 100644
--- a/bot/helper/mirror_utils/download_utils/aria2_download.py
+++ b/bot/helper/mirror_utils/download_utils/aria2_download.py
@@ -1,4 +1,4 @@
-from bot import aria2,download_dict,download_dict_lock
+from bot import aria2
from bot.helper.ext_utils.bot_utils import *
from .download_helper import DownloadHelper
from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
@@ -6,13 +6,14 @@
import threading
from aria2p import API
+
class AriaDownloadHelper(DownloadHelper):
def __init__(self, listener):
super().__init__()
self.gid = None
- self._listener = listener
- self._resource_lock = threading.Lock()
+ self.__listener = listener
+ self._resource_lock = threading.RLock()
def __onDownloadStarted(self, api, gid):
with self._resource_lock:
@@ -28,23 +29,23 @@ def __onDownloadComplete(self, api: API, gid):
if download.followed_by_ids:
self.gid = download.followed_by_ids[0]
with download_dict_lock:
- download_dict[self._listener.uid] = AriaDownloadStatus(self.gid, self._listener)
- if download.is_torrent:
- download_dict[self._listener.uid].is_torrent = True
+ download_dict[self.__listener.uid] = AriaDownloadStatus(self, self.__listener)
+ if download.is_torrent:
+ download_dict[self.__listener.uid].is_torrent = True
update_all_messages()
LOGGER.info(f'Changed gid from {gid} to {self.gid}')
else:
- self._listener.onDownloadComplete()
+ self.__listener.onDownloadComplete()
def __onDownloadPause(self, api, gid):
if self.gid == gid:
LOGGER.info("Called onDownloadPause")
- self._listener.onDownloadError('Download stopped by user!')
+ self.__listener.onDownloadError('Download stopped by user!')
def __onDownloadStopped(self, api, gid):
if self.gid == gid:
LOGGER.info("Called on_download_stop")
- self._listener.onDownloadError('Download stopped by user!')
+ self.__listener.onDownloadError('Download stopped by user!')
def __onDownloadError(self, api, gid):
with self._resource_lock:
@@ -52,7 +53,7 @@ def __onDownloadError(self, api, gid):
download = api.get_download(gid)
error = download.error_message
LOGGER.info(f"Download Error: {error}")
- self._listener.onDownloadError(error)
+ self.__listener.onDownloadError(error)
def add_download(self, link: str, path):
if is_magnet(link):
@@ -61,9 +62,9 @@ def add_download(self, link: str, path):
download = aria2.add_uris([link], {'dir': path})
self.gid = download.gid
with download_dict_lock:
- download_dict[self._listener.uid] = AriaDownloadStatus(self.gid, self._listener)
+ download_dict[self.__listener.uid] = AriaDownloadStatus(self, self.__listener)
if download.error_message:
- self._listener.onDownloadError(download.error_message)
+ self.__listener.onDownloadError(download.error_message)
return
LOGGER.info(f"Started: {self.gid} DIR:{download.dir} ")
aria2.listen_to_notifications(threaded=True, on_download_start=self.__onDownloadStarted,
@@ -71,3 +72,14 @@ def add_download(self, link: str, path):
on_download_pause=self.__onDownloadPause,
on_download_stop=self.__onDownloadStopped,
on_download_complete=self.__onDownloadComplete)
+
+ def cancel_download(self):
+ download = aria2.get_download(self.gid)
+ if download.is_waiting:
+ aria2.remove([download])
+ self.__listener.onDownloadError("Cancelled by user")
+ return
+ if len(download.followed_by_ids) != 0:
+ downloads = aria2.get_downloads(download.followed_by_ids)
+ aria2.pause(downloads)
+ aria2.pause([download])
diff --git a/bot/helper/mirror_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_utils/download_utils/direct_link_generator.py
new file mode 100644
index 000000000..84ea43a5b
--- /dev/null
+++ b/bot/helper/mirror_utils/download_utils/direct_link_generator.py
@@ -0,0 +1,159 @@
+# Copyright (C) 2019 The Raphielscape Company LLC.
+#
+# Licensed under the Raphielscape Public License, Version 1.c (the "License");
+# you may not use this file except in compliance with the License.
+#
+""" Helper Module containing various sites direct links generators. This module is copied and modified as per need
+from https://github.com/AvinashReddy3108/PaperplaneExtended . I hereby take no credit of the following code other
+than the modifications. See https://github.com/AvinashReddy3108/PaperplaneExtended/commits/master/userbot/modules/direct_links.py
+for original authorship. """
+
+import json
+import re
+import urllib.parse
+from os import popen
+from random import choice
+
+import requests
+from bs4 import BeautifulSoup
+
+from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
+
+
+def direct_link_generator(link: str):
+ """ direct links generator """
+ if not link:
+ raise DirectDownloadLinkException("`No links found!`")
+ elif 'zippyshare.com' in link:
+ return zippy_share(link)
+ elif 'yadi.sk' in link:
+ return yandex_disk(link)
+ elif 'cloud.mail.ru' in link:
+ return cm_ru(link)
+ elif 'mediafire.com' in link:
+ return mediafire(link)
+ elif 'osdn.net' in link:
+ return osdn(link)
+ elif 'github.com' in link:
+ return github(link)
+ else:
+ raise DirectDownloadLinkException(f'No Direct link function found for {link}')
+
+
+def zippy_share(url: str) -> str:
+ """ ZippyShare direct links generator
+ Based on https://github.com/LameLemon/ziggy"""
+ dl_url = ''
+ try:
+ link = re.findall(r'\bhttps?://.*zippyshare\.com\S+', url)[0]
+ except IndexError:
+ raise DirectDownloadLinkException("`No ZippyShare links found`\n")
+ session = requests.Session()
+ base_url = re.search('http.+.com', link).group()
+ response = session.get(link)
+ page_soup = BeautifulSoup(response.content, "lxml")
+ scripts = page_soup.find_all("script", {"type": "text/javascript"})
+ for script in scripts:
+ if "getElementById('dlbutton')" in script.text:
+ url_raw = re.search(r'= (?P\".+\" \+ (?P