diff --git a/bot/__init__.py b/bot/__init__.py
index 334f600a0f9..da5a516800f 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -115,6 +115,8 @@ def get_client() -> qba.TorrentsAPIMixIn:
# Stores list of users and chats the bot is authorized to use in
AUTHORIZED_CHATS = set()
SUDO_USERS = set()
+AS_DOC_USERS = set()
+AS_MEDIA_USERS = set()
if os.path.exists('authorized_chats.txt'):
with open('authorized_chats.txt', 'r+') as f:
lines = f.readlines()
@@ -191,11 +193,15 @@ def get_client() -> qba.TorrentsAPIMixIn:
telegraph_token = telegraph.get_access_token()
try:
- STATUS_LIMIT = getConfig('STATUS_LIMIT')
- if len(STATUS_LIMIT) == 0:
+ TG_SPLIT_SIZE = int(getConfig('TG_SPLIT_SIZE'))
+ if len(f'TG_SPLIT_SIZE') == 0 or TG_SPLIT_SIZE > 2097152000:
+ raise KeyError
+except KeyError:
+ TG_SPLIT_SIZE = 2097152000
+try:
+ STATUS_LIMIT = int(getConfig('STATUS_LIMIT'))
+ if len(f'STATUS_LIMIT') == 0:
raise KeyError
- else:
- STATUS_LIMIT = int(getConfig('STATUS_LIMIT'))
except KeyError:
STATUS_LIMIT = None
try:
@@ -330,6 +336,11 @@ def get_client() -> qba.TorrentsAPIMixIn:
IS_VPS = IS_VPS.lower() == 'true'
except KeyError:
IS_VPS = False
+try:
+ AS_DOCUMENT = getConfig('AS_DOCUMENT')
+ AS_DOCUMENT = AS_DOCUMENT.lower() == 'true'
+except KeyError:
+ AS_DOCUMENT = False
try:
RECURSIVE_SEARCH = getConfig('RECURSIVE_SEARCH')
RECURSIVE_SEARCH = RECURSIVE_SEARCH.lower() == 'true'
diff --git a/bot/__main__.py b/bot/__main__.py
index 57c8ae9f46f..2710cb47521 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -17,7 +17,7 @@
from .helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
from .helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper import button_build
-from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch, shell, eval, torrent_search, delete, speedtest, count
+from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch, shell, eval, torrent_search, delete, speedtest, count, leech_settings
def stats(update, context):
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index d146acae927..6a86a42e7ee 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -29,6 +29,7 @@ class MirrorStatus:
STATUS_PAUSE = "Paused...⭕️"
STATUS_ARCHIVING = "Archiving...🔐"
STATUS_EXTRACTING = "Extracting...📂"
+ STATUS_SPLITTING = "Splitting...✂️"
PROGRESS_MAX_SIZE = 100 // 8
@@ -54,7 +55,6 @@ def __setInterval(self):
def cancel(self):
self.stopEvent.set()
-
def get_readable_file_size(size_in_bytes) -> str:
if size_in_bytes is None:
return '0B'
@@ -67,7 +67,6 @@ def get_readable_file_size(size_in_bytes) -> str:
except IndexError:
return 'File too large'
-
def getDownloadByGid(gid):
with download_dict_lock:
for dl in download_dict.values():
@@ -77,13 +76,13 @@ def getDownloadByGid(gid):
not in [
MirrorStatus.STATUS_ARCHIVING,
MirrorStatus.STATUS_EXTRACTING,
+ MirrorStatus.STATUS_SPLITTING,
]
and dl.gid() == gid
):
return dl
return None
-
def getAllDownload():
with download_dict_lock:
for dlDetails in download_dict.values():
@@ -93,6 +92,7 @@ def getAllDownload():
not in [
MirrorStatus.STATUS_ARCHIVING,
MirrorStatus.STATUS_EXTRACTING,
+ MirrorStatus.STATUS_SPLITTING,
MirrorStatus.STATUS_CLONING,
MirrorStatus.STATUS_UPLOADING,
]
@@ -101,7 +101,6 @@ def getAllDownload():
return dlDetails
return None
-
def get_progress_bar_string(status):
completed = status.processed_bytes() / 8
total = status.size_raw() / 8
@@ -116,11 +115,10 @@ def get_progress_bar_string(status):
p_str = f"[{p_str}]"
return p_str
-
def get_readable_message():
with download_dict_lock:
msg = ""
- INDEX = 0
+ start = 0
if STATUS_LIMIT is not None:
dick_no = len(download_dict)
global pages
@@ -128,44 +126,40 @@ def get_readable_message():
if PAGE_NO > pages and pages != 0:
globals()['COUNT'] -= STATUS_LIMIT
globals()['PAGE_NO'] -= 1
- for download in list(download_dict.values()):
- INDEX += 1
- if INDEX > COUNT:
-            msg += f"Filename: {download.name()}"
- msg += f"\nStatus: {download.status()}"
- if download.status() not in [
- MirrorStatus.STATUS_ARCHIVING,
- MirrorStatus.STATUS_EXTRACTING,
- ]:
-                msg += f"\n{get_progress_bar_string(download)} {download.progress()}"
- if download.status() == MirrorStatus.STATUS_CLONING:
-                msg += f"\nCloned: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
- elif download.status() == MirrorStatus.STATUS_UPLOADING:
-                msg += f"\nUploaded: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
- else:
-                msg += f"\nDownloaded: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
-            msg += f"\nSpeed: {download.speed()}" \
-                   f", ETA: {download.eta()}"
- # if hasattr(download, 'is_torrent'):
- try:
-                msg += f"\nSeeders: {download.aria_download().num_seeders}" \
-                       f" | Peers: {download.aria_download().connections}"
- except:
- pass
- try:
-                msg += f"\nSeeders: {download.torrent_info().num_seeds}" \
-                       f" | Leechers: {download.torrent_info().num_leechs}"
- except:
- pass
-            msg += f"\nTo Stop: /{BotCommands.CancelMirror} {download.gid()}"
- msg += "\n\n"
- if STATUS_LIMIT is not None and INDEX >= COUNT + STATUS_LIMIT:
- break
+ start = COUNT
+ for index, download in enumerate(list(download_dict.values())[start:], start=1):
+            msg += f"Filename: {download.name()}"
+ msg += f"\nStatus: {download.status()}"
+ if download.status() not in [
+ MirrorStatus.STATUS_ARCHIVING,
+ MirrorStatus.STATUS_EXTRACTING,
+ MirrorStatus.STATUS_SPLITTING,
+ ]:
+                msg += f"\n{get_progress_bar_string(download)} {download.progress()}"
+ if download.status() == MirrorStatus.STATUS_CLONING:
+ msg += f"\nCloned: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+ elif download.status() == MirrorStatus.STATUS_UPLOADING:
+ msg += f"\nUploaded: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+ else:
+ msg += f"\nDownloaded: {get_readable_file_size(download.processed_bytes())} of {download.size()}"
+ msg += f"\nSpeed: {download.speed()} ETA: {download.eta()}"
+ try:
+ msg += f"\nSeeders: {download.aria_download().num_seeders}" \
+ f" | Peers: {download.aria_download().connections}"
+ except:
+ pass
+ try:
+ msg += f"\nSeeders: {download.torrent_info().num_seeds}" \
+ f" | Leechers: {download.torrent_info().num_leechs}"
+ except:
+ pass
+            msg += f"\nTo Stop: /{BotCommands.CancelMirror} {download.gid()}"
+ msg += "\n\n"
+ if STATUS_LIMIT is not None and index == STATUS_LIMIT:
+ break
if STATUS_LIMIT is not None:
- if INDEX > COUNT + STATUS_LIMIT:
- return None, None
if dick_no > STATUS_LIMIT:
-                msg += f"Page: {PAGE_NO}/{pages} | Tasks: {dick_no}\n"
+ msg += f"Page: {PAGE_NO}/{pages} | Tasks: {dick_no}\n"
buttons = button_build.ButtonMaker()
buttons.sbutton("Previous", "pre")
buttons.sbutton("Next", "nex")
@@ -173,7 +167,6 @@ def get_readable_message():
return msg, button
return msg, ""
-
def flip(update, context):
query = update.callback_query
query.answer()
@@ -194,7 +187,6 @@ def flip(update, context):
PAGE_NO -= 1
message_utils.update_all_messages()
-
def check_limit(size, limit, tar_unzip_limit=None, is_tar_ext=False):
LOGGER.info('Checking File/Folder Size...')
if is_tar_ext and tar_unzip_limit is not None:
@@ -227,20 +219,16 @@ def get_readable_time(seconds: int) -> str:
result += f'{seconds}s'
return result
-
def is_url(url: str):
url = re.findall(URL_REGEX, url)
return bool(url)
-
def is_gdrive_link(url: str):
return "drive.google.com" in url
-
def is_mega_link(url: str):
return "mega.nz" in url or "mega.co.nz" in url
-
def get_mega_link_type(url: str):
if "folder" in url:
return "folder"
@@ -250,12 +238,10 @@ def get_mega_link_type(url: str):
return "folder"
return "file"
-
def is_magnet(url: str):
magnet = re.findall(MAGNET_REGEX, url)
return bool(magnet)
-
def new_thread(fn):
"""To use as decorator to make a function call threaded.
Needs import
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 1c6718ac856..1a25413efd9 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -1,26 +1,35 @@
import sys
-from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client
import shutil
import os
import pathlib
import magic
import tarfile
+import subprocess
+import time
+
+from PIL import Image
+from hachoir.parser import createParser
+from hachoir.metadata import extractMetadata
+from fsplit.filesplit import Filesplit
+
from .exceptions import NotSupportedExtractionArchive
+from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client, TG_SPLIT_SIZE
+VIDEO_SUFFIXES = ("M4V", "MP4", "MOV", "FLV", "WMV", "3GP", "MPG", "WEBM", "MKV", "AVI")
+
+fs = Filesplit()
def clean_download(path: str):
if os.path.exists(path):
LOGGER.info(f"Cleaning Download: {path}")
shutil.rmtree(path)
-
def start_cleanup():
try:
shutil.rmtree(DOWNLOAD_DIR)
except FileNotFoundError:
pass
-
def clean_all():
aria2.remove_all(True)
get_client().torrents_delete(torrent_hashes="all", delete_files=True)
@@ -30,7 +39,6 @@ def clean_all():
except FileNotFoundError:
pass
-
def exit_clean_up(signal, frame):
try:
LOGGER.info("Please wait, while we clean up the downloads and stop running downloads")
@@ -40,7 +48,6 @@ def exit_clean_up(signal, frame):
LOGGER.warning("Force Exiting before the cleanup finishes!")
sys.exit(1)
-
def get_path_size(path):
if os.path.isfile(path):
return os.path.getsize(path)
@@ -51,7 +58,6 @@ def get_path_size(path):
total_size += os.path.getsize(abs_path)
return total_size
-
def tar(org_path):
tar_path = org_path + ".tar"
#path = pathlib.PurePath(org_path)
@@ -61,7 +67,6 @@ def tar(org_path):
tar.close()
return tar_path
-
def zip(name, path):
root_dir = os.path.dirname(path)
base_dir = os.path.basename(path.strip(os.sep))
@@ -70,7 +75,6 @@ def zip(name, path):
LOGGER.info(f"Zip: {zip_path}")
return zip_path
-
def get_base_name(orig_path: str):
if orig_path.endswith(".tar.bz2"):
return orig_path.replace(".tar.bz2", "")
@@ -149,9 +153,57 @@ def get_base_name(orig_path: str):
else:
raise NotSupportedExtractionArchive('File format not supported for extraction')
-
def get_mime_type(file_path):
mime = magic.Magic(mime=True)
mime_type = mime.from_file(file_path)
mime_type = mime_type or "text/plain"
return mime_type
+
+def take_ss(video_file, duration):
+ des_dir = f"Thumbnails"
+ if not os.path.exists(des_dir):
+ os.mkdir(des_dir)
+ des_dir = os.path.join(des_dir, f"{time.time()}.jpg")
+ duration = int(duration) / 2
+ subprocess.run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
+ "-i", video_file, "-vframes", "1", des_dir])
+ Image.open(des_dir).convert("RGB").save(des_dir)
+ img = Image.open(des_dir)
+ w, h = img.size
+ img.resize((320, h))
+ img.save(des_dir, "JPEG")
+ if os.path.lexists(des_dir):
+ return des_dir, 320, h
+ else:
+ return None, 0, 0
+
+def split(path, size, split_size, start_time=0, i=1):
+ ftype = get_mime_type(path)
+ ftype = ftype.split("/")[0]
+ ftype = ftype.lower().strip()
+ out_dir = os.path.dirname(path)
+ base_name = os.path.basename(path)
+ if ftype == "video" or base_name.upper().endswith(VIDEO_SUFFIXES):
+ base_name, extension = os.path.splitext(path)
+ metadata = extractMetadata(createParser(path))
+ total_duration = metadata.get('duration').seconds - 5
+ while start_time < total_duration:
+ parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(2), str(extension))
+ out_path = os.path.join(out_dir, parted_name)
+ subprocess.run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-i",
+ path, "-ss", str(start_time), "-fs", str(split_size),
+ "-strict", "-2", "-c", "copy", out_path])
+ out_size = get_path_size(out_path)
+ if out_size > TG_SPLIT_SIZE:
+ dif = out_size - TG_SPLIT_SIZE
+ split_size = split_size - dif
+ os.remove(out_path)
+ return split(path, size, split_size, start_time, i)
+ metadata = extractMetadata(createParser(out_path))
+ start_time = start_time + metadata.get('duration').seconds - 5
+ i = i + 1
+ else:
+ #subprocess.run(["split", "--numeric-suffixes=1", "--suffix-length=5", f"--bytes={split_size}", path, out_dir])
+ fs.split(file=path, split_size=split_size, output_dir=out_dir)
+ csv_path = os.path.join(out_dir, "fs_manifest.csv")
+ os.remove(csv_path)
diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py
index 30d5c83983b..c392d1fcf64 100644
--- a/bot/helper/mirror_utils/download_utils/aria2_download.py
+++ b/bot/helper/mirror_utils/download_utils/aria2_download.py
@@ -19,7 +19,7 @@ def __onDownloadStarted(self, api, gid):
sleep(1)
dl = getDownloadByGid(gid)
download = aria2.get_download(gid)
- if STOP_DUPLICATE and dl is not None:
+ if STOP_DUPLICATE and dl is not None and not dl.getListener().isLeech:
LOGGER.info('Checking File/Folder if already in Drive...')
sname = aria2.get_download(gid).name
if dl.getListener().isTar:
@@ -89,8 +89,7 @@ def start_listener(self):
aria2.listen_to_notifications(threaded=True, on_download_start=self.__onDownloadStarted,
on_download_error=self.__onDownloadError,
on_download_stop=self.__onDownloadStopped,
- on_download_complete=self.__onDownloadComplete,
- timeout=1)
+ on_download_complete=self.__onDownloadComplete)
def add_download(self, link: str, path, listener, filename):
if is_magnet(link):
diff --git a/bot/helper/mirror_utils/download_utils/mega_downloader.py b/bot/helper/mirror_utils/download_utils/mega_downloader.py
index d9e76991056..7badafc62a2 100644
--- a/bot/helper/mirror_utils/download_utils/mega_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/mega_downloader.py
@@ -167,7 +167,7 @@ def add_download(mega_link: str, path: str, listener):
node = folder_api.authorizeNode(mega_listener.node)
if mega_listener.error is not None:
return sendMessage(str(mega_listener.error), listener.bot, listener.update)
- if STOP_DUPLICATE:
+ if STOP_DUPLICATE and not listener.isLeech:
LOGGER.info('Checking File/Folder if already in Drive')
mname = node.getName()
if listener.isTar:
diff --git a/bot/helper/mirror_utils/download_utils/qbit_downloader.py b/bot/helper/mirror_utils/download_utils/qbit_downloader.py
index 7c2d27450e3..82c824f2e0d 100644
--- a/bot/helper/mirror_utils/download_utils/qbit_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/qbit_downloader.py
@@ -23,9 +23,11 @@
from bot.helper.telegram_helper import button_build
LOGGER = logging.getLogger(__name__)
+logging.getLogger('qbittorrentapi').setLevel(logging.ERROR)
+logging.getLogger('requests').setLevel(logging.ERROR)
+logging.getLogger('urllib3').setLevel(logging.ERROR)
-
-class qbittorrent:
+class QbitTorrent:
def __init__(self):
@@ -51,9 +53,12 @@ def add_torrent(self, link, dire, listener, qbitsel):
self.ext_hash = get_hash_magnet(link)
tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
if len(tor_info) > 0:
- sendMessage("This Torrent is already in list.", listener.bot, listener.update)
- self.client.auth_log_out()
- return
+ if tor_info[0].state == "pausedDL":
+ self.client.torrents_delete(torrent_hashes=self.ext_hash)
+ else:
+ sendMessage("This Torrent is already in list.", listener.bot, listener.update)
+ self.client.auth_log_out()
+ return
if is_file:
op = self.client.torrents_add(torrent_files=[link], save_path=dire)
os.remove(link)
@@ -100,6 +105,7 @@ def add_torrent(self, link, dire, listener, qbitsel):
except:
deleteMessage(listener.bot, meta)
return False
+ time.sleep(0.5)
self.client.torrents_pause(torrent_hashes=self.ext_hash)
for n in str(self.ext_hash):
if n.isdigit():
@@ -142,13 +148,12 @@ def update(self):
if tor_info.state == "metaDL":
self.stalled_time = time.time()
if time.time() - self.meta_time >= 999999999: # timeout while downloading metadata
+ self.updater.cancel()
self.client.torrents_pause(torrent_hashes=self.ext_hash)
time.sleep(0.3)
self.listener.onDownloadError("Dead Torrent!")
self.client.torrents_delete(torrent_hashes=self.ext_hash)
self.client.auth_log_out()
- self.updater.cancel()
- return
elif tor_info.state == "downloading":
self.stalled_time = time.time()
if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and not self.checked:
@@ -162,31 +167,29 @@ def update(self):
result = check_limit(size, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext)
self.checked = True
if result:
+ self.updater.cancel()
self.client.torrents_pause(torrent_hashes=self.ext_hash)
time.sleep(0.3)
self.listener.onDownloadError(f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}")
self.client.torrents_delete(torrent_hashes=self.ext_hash)
self.client.auth_log_out()
- self.updater.cancel()
- return
elif tor_info.state == "stalledDL":
if time.time() - self.stalled_time >= 999999999: # timeout after downloading metadata
+ self.updater.cancel()
self.client.torrents_pause(torrent_hashes=self.ext_hash)
time.sleep(0.3)
self.listener.onDownloadError("Dead Torrent!")
self.client.torrents_delete(torrent_hashes=self.ext_hash)
self.client.auth_log_out()
- self.updater.cancel()
- return
elif tor_info.state == "error":
+ self.updater.cancel()
self.client.torrents_pause(torrent_hashes=self.ext_hash)
time.sleep(0.3)
self.listener.onDownloadError("No enough space for this torrent on device")
self.client.torrents_delete(torrent_hashes=self.ext_hash)
self.client.auth_log_out()
- self.updater.cancel()
- return
elif tor_info.state == "uploading" or tor_info.state.lower().endswith("up"):
+ self.updater.cancel()
self.client.torrents_pause(torrent_hashes=self.ext_hash)
if self.qbitsel:
for dirpath, subdir, files in os.walk(f"{self.dire}", topdown=False):
@@ -202,7 +205,6 @@ def update(self):
self.listener.onDownloadComplete()
self.client.torrents_delete(torrent_hashes=self.ext_hash)
self.client.auth_log_out()
- self.updater.cancel()
except:
self.updater.cancel()
@@ -216,8 +218,7 @@ def get_confirm(update, context):
if qdl is not None:
if user_id != qdl.listener.message.from_user.id:
query.answer(text="Don't waste your time!", show_alert=True)
- return
- if data[0] == "pin":
+ elif data[0] == "pin":
query.answer(text=data[2], show_alert=True)
elif data[0] == "done":
query.answer()
diff --git a/bot/helper/mirror_utils/download_utils/telegram_downloader.py b/bot/helper/mirror_utils/download_utils/telegram_downloader.py
index f4d330b66c2..b860e93956c 100644
--- a/bot/helper/mirror_utils/download_utils/telegram_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/telegram_downloader.py
@@ -99,7 +99,7 @@ def add_download(self, message, path, filename):
path = path + name
if download:
- if STOP_DUPLICATE:
+ if STOP_DUPLICATE and not self.__listener.isLeech:
LOGGER.info('Checking File/Folder if already in Drive...')
gd = GoogleDriveHelper()
smsg, button = gd.drive_list(name, True, True)
diff --git a/bot/helper/mirror_utils/status_utils/aria_download_status.py b/bot/helper/mirror_utils/status_utils/aria_download_status.py
index a5d530f462a..c0a794dda4c 100644
--- a/bot/helper/mirror_utils/status_utils/aria_download_status.py
+++ b/bot/helper/mirror_utils/status_utils/aria_download_status.py
@@ -91,6 +91,9 @@ def cancel_download(self):
return
if len(download.followed_by_ids) != 0:
downloads = aria2.get_downloads(download.followed_by_ids)
+ self.__listener.onDownloadError('Download stopped by user!')
aria2.remove(downloads, force=True)
+ aria2.remove([download], force=True)
+ return
self.__listener.onDownloadError('Download stopped by user!')
aria2.remove([download], force=True)
diff --git a/bot/helper/mirror_utils/status_utils/split_status.py b/bot/helper/mirror_utils/status_utils/split_status.py
new file mode 100644
index 00000000000..54432971663
--- /dev/null
+++ b/bot/helper/mirror_utils/status_utils/split_status.py
@@ -0,0 +1,36 @@
+from .status import Status
+from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus
+
+
+class SplitStatus(Status):
+ def __init__(self, name, path, size):
+ self.__name = name
+ self.__path = path
+ self.__size = size
+
+ # The progress of Tar function cannot be tracked. So we just return dummy values.
+ # If this is possible in future,we should implement it
+
+ def progress(self):
+ return '0'
+
+ def speed(self):
+ return '0'
+
+ def name(self):
+ return self.__name
+
+ def path(self):
+ return self.__path
+
+ def size(self):
+ return get_readable_file_size(self.__size)
+
+ def eta(self):
+ return '0s'
+
+ def status(self):
+ return MirrorStatus.STATUS_SPLITTING
+
+ def processed_bytes(self):
+ return 0
diff --git a/bot/helper/mirror_utils/status_utils/tg_upload_status.py b/bot/helper/mirror_utils/status_utils/tg_upload_status.py
new file mode 100644
index 00000000000..a6e2d280488
--- /dev/null
+++ b/bot/helper/mirror_utils/status_utils/tg_upload_status.py
@@ -0,0 +1,61 @@
+from .status import Status
+from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
+from bot import DOWNLOAD_DIR
+
+
+class TgUploadStatus(Status):
+ def __init__(self, obj, size, gid, listener):
+ self.obj = obj
+ self.__size = size
+ self.uid = listener.uid
+ self.message = listener.message
+ self.__gid = gid
+
+ def path(self):
+ return f"{DOWNLOAD_DIR}{self.uid}"
+
+ def processed_bytes(self):
+ return self.obj.uploaded_bytes
+
+ def size_raw(self):
+ return self.__size
+
+ def size(self):
+ return get_readable_file_size(self.__size)
+
+ def status(self):
+ return MirrorStatus.STATUS_UPLOADING
+
+ def name(self):
+ return self.obj.name
+
+ def progress_raw(self):
+ try:
+ return self.obj.uploaded_bytes / self.__size * 100
+ except ZeroDivisionError:
+ return 0
+
+ def progress(self):
+ return f'{round(self.progress_raw(), 2)}%'
+
+ def speed_raw(self):
+ """
+ :return: Upload speed in Bytes/Seconds
+ """
+ return self.obj.speed()
+
+ def speed(self):
+ return f'{get_readable_file_size(self.speed_raw())}/s'
+
+ def eta(self):
+ try:
+ seconds = (self.__size - self.obj.uploaded_bytes) / self.speed_raw()
+ return f'{get_readable_time(seconds)}'
+ except ZeroDivisionError:
+ return '-'
+
+ def gid(self) -> str:
+ return self.__gid
+
+ def download(self):
+ return self.obj
diff --git a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
new file mode 100644
index 00000000000..4087c2e575d
--- /dev/null
+++ b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
@@ -0,0 +1,150 @@
+import os
+import logging
+import time
+
+from pyrogram.errors import FloodWait
+from hachoir.parser import createParser
+from hachoir.metadata import extractMetadata
+
+from bot import app, DOWNLOAD_DIR, AS_DOCUMENT, AS_DOC_USERS, AS_MEDIA_USERS
+from bot.helper.ext_utils.fs_utils import get_mime_type, take_ss
+
+LOGGER = logging.getLogger(__name__)
+logging.getLogger("pyrogram").setLevel(logging.WARNING)
+
+VIDEO_SUFFIXES = ("M4V", "MP4", "MOV", "FLV", "WMV", "3GP", "MPG", "WEBM", "MKV", "AVI")
+AUDIO_SUFFIXES = ("MP3", "M4A", "M4B", "FLAC", "WAV", "AIF", "OGG", "AAC", "DTS", "MID", "AMR")
+IMAGE_SUFFIXES = ("JPG", "JPX", "PNG", "GIF", "WEBP", "CR2", "TIF", "BMP", "JXR", "PSD", "ICO", "HEIC")
+
+
+class TgUploader:
+
+ def __init__(self, name=None, listener=None):
+ self.__listener = listener
+ self.name = name
+ self.__app = app
+ self.total_bytes = 0
+ self.uploaded_bytes = 0
+ self.last_uploaded = 0
+ self.start_time = time.time()
+ self.is_cancelled = False
+ self.chat_id = listener.message.chat.id
+ self.message_id = listener.uid
+ self.user_id = listener.message.from_user.id
+ self.as_doc = AS_DOCUMENT
+ self.thumb = f"Thumbnails/{self.user_id}.jpg"
+ self.sent_msg = self.__app.get_messages(self.chat_id, self.message_id)
+
+ def upload(self):
+ msgs_dict = {}
+ path = f"{DOWNLOAD_DIR}{self.message_id}"
+ self.user_settings()
+ for dirpath, subdir, files in sorted(os.walk(path)):
+ for file in sorted(files):
+ up_path = os.path.join(dirpath, file)
+ self.upload_file(up_path, file)
+ msgs_dict[file] = self.sent_msg.message_id
+ os.remove(up_path)
+ self.last_uploaded = 0
+ LOGGER.info("Leeching Done!")
+ self.__listener.onUploadComplete(self.name, None, msgs_dict, None, None)
+
+ def upload_file(self, up_path, file):
+ notMedia = False
+ thumb = self.thumb
+ try:
+ if not self.as_doc:
+ ftype = get_mime_type(up_path)
+ ftype = ftype.split("/")[0]
+ ftype = ftype.lower().strip()
+ duration = 0
+ if ftype == "video" or file.upper().endswith(VIDEO_SUFFIXES):
+ width = 0
+ height = 0
+ metadata = extractMetadata(createParser(up_path))
+ if metadata.has("duration"):
+ duration = metadata.get("duration").seconds
+ if thumb is None:
+ thumb, width, height = take_ss(up_path, duration)
+ self.sent_msg = self.sent_msg.reply_video(video=up_path,
+ quote=True,
+ caption=file,
+ parse_mode="html",
+ duration=duration,
+ width=width,
+ height=height,
+ thumb=thumb,
+ supports_streaming=True,
+ disable_notification=True,
+ progress=self.upload_progress)
+ if self.thumb is None:
+ os.remove(thumb)
+ elif ftype == "audio" or file.upper().endswith(AUDIO_SUFFIXES):
+ title = None
+ artist = None
+ metadata = extractMetadata(createParser(up_path))
+ if metadata.has("duration"):
+ duration = metadata.get('duration').seconds
+ if metadata.has("title"):
+ title = metadata.get("title")
+ if metadata.has("artist"):
+ artist = metadata.get("artist")
+ self.sent_msg = self.sent_msg.reply_audio(audio=up_path,
+ quote=True,
+ caption=file,
+ parse_mode="html",
+ duration=duration,
+ performer=artist,
+ title=title,
+ thumb=thumb,
+ disable_notification=True,
+ progress=self.upload_progress)
+ elif ftype == "image" or file.upper().endswith(IMAGE_SUFFIXES):
+ self.sent_msg = self.sent_msg.reply_photo(photo=up_path,
+ quote=True,
+ caption=file,
+ parse_mode="html",
+ supports_streaming=True,
+ disable_notification=True,
+ progress=self.upload_progress)
+ else:
+ notMedia = True
+ if self.as_doc or notMedia:
+ self.sent_msg = self.sent_msg.reply_document(document=up_path,
+ quote=True,
+ thumb=thumb,
+ caption=file,
+ parse_mode="html",
+ disable_notification=True,
+ progress=self.upload_progress)
+ except FloodWait as f:
+ LOGGER.info(f)
+ time.sleep(f.x)
+ def upload_progress(self, current, total):
+ if self.is_cancelled:
+ self.__app.stop_transmission()
+ return
+ chunk_size = current - self.last_uploaded
+ self.last_uploaded = current
+ self.uploaded_bytes += chunk_size
+
+ def user_settings(self):
+ if self.user_id in AS_DOC_USERS:
+ self.as_doc = True
+ elif self.user_id in AS_MEDIA_USERS:
+ self.as_doc = False
+ if not os.path.lexists(self.thumb):
+ self.thumb = None
+
+ def speed(self):
+ try:
+ return self.uploaded_bytes / (time.time() - self.start_time)
+ except ZeroDivisionError:
+ return 0
+
+ def cancel_download(self):
+ self.is_cancelled = True
+ LOGGER.info(f"Cancelling Upload: {self.name}")
+ self.__listener.onUploadError('your upload has been stopped!')
+
+
diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py
index 04f34bf76c6..1c6a9308406 100644
--- a/bot/helper/telegram_helper/bot_commands.py
+++ b/bot/helper/telegram_helper/bot_commands.py
@@ -33,5 +33,18 @@ def __init__(self):
self.ShellCommand = 'shell'
self.ExecHelpCommand = 'exechelp'
self.TsHelpCommand = 'tshelp'
+ self.LeechSetCommand = 'leechset'
+ self.SetThumbCommand = 'setthumb'
+ self.LeechCommand = 'leech'
+ self.TarLeechCommand = 'tarleech'
+ self.UnzipLeechCommand = 'unzipleech'
+ self.ZipLeechCommand = 'zipleech'
+ self.QbLeechCommand = 'qbleech'
+ self.QbTarLeechCommand = 'qbtarleech'
+ self.QbUnzipLeechCommand = 'qbunzipleech'
+ self.QbZipLeechCommand = 'qbzipleech'
+ self.LeechWatchCommand = 'leechwatch'
+ self.LeechTarWatchCommand = 'leechtarwatch'
+ self.LeechZipWatchCommand = 'leechzipwatch'
BotCommands = _BotCommands()
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index 7b500f07c28..bcb415f4cfb 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -71,11 +71,9 @@ def update_all_messages():
free = get_readable_file_size(free)
currentTime = get_readable_time(time.time() - botStartTime)
msg, buttons = get_readable_message()
- if msg is None:
- return
-    msg += f"CPU: {psutil.cpu_percent()}%" \
-           f" RAM: {psutil.virtual_memory().percent}%" \
-           f" DISK: {psutil.disk_usage('/').percent}%"
+ msg += f"CPU: {psutil.cpu_percent()}%" \
+ f" RAM: {psutil.virtual_memory().percent}%" \
+ f" DISK: {psutil.disk_usage('/').percent}%"
with download_dict_lock:
dlspeed_bytes = 0
uldl_bytes = 0
@@ -93,7 +91,7 @@ def update_all_messages():
uldl_bytes += float(speedy.split('M')[0]) * 1048576
dlspeed = get_readable_file_size(dlspeed_bytes)
ulspeed = get_readable_file_size(uldl_bytes)
-    msg += f"\nFREE: {free} | UPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n"
+ msg += f"\nFREE: {free} | UPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n"
with status_reply_dict_lock:
for chat_id in list(status_reply_dict.keys()):
if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id].text:
@@ -114,11 +112,9 @@ def sendStatusMessage(msg, bot):
free = get_readable_file_size(free)
currentTime = get_readable_time(time.time() - botStartTime)
progress, buttons = get_readable_message()
- if progress is None:
- progress, buttons = get_readable_message()
-    progress += f"CPU: {psutil.cpu_percent()}%" \
-                f" RAM: {psutil.virtual_memory().percent}%" \
-                f" DISK: {psutil.disk_usage('/').percent}%"
+ progress += f"CPU: {psutil.cpu_percent()}%" \
+ f" RAM: {psutil.virtual_memory().percent}%" \
+ f" DISK: {psutil.disk_usage('/').percent}%"
with download_dict_lock:
dlspeed_bytes = 0
uldl_bytes = 0
@@ -136,7 +132,7 @@ def sendStatusMessage(msg, bot):
uldl_bytes += float(speedy.split('M')[0]) * 1048576
dlspeed = get_readable_file_size(dlspeed_bytes)
ulspeed = get_readable_file_size(uldl_bytes)
-    progress += f"\nFREE: {free} | UPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n"
+ progress += f"\nFREE: {free} | UPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n"
with status_reply_dict_lock:
if msg.message.chat.id in list(status_reply_dict.keys()):
try:
diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py
index f50fa21a33d..0e2fd6d34ab 100644
--- a/bot/modules/authorize.py
+++ b/bot/modules/authorize.py
@@ -2,8 +2,6 @@
from bot import AUTHORIZED_CHATS, SUDO_USERS, dispatcher, DB_URI
from telegram.ext import CommandHandler
from bot.helper.telegram_helper.filters import CustomFilters
-from telegram.ext import Filters
-from telegram import Update
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.ext_utils.db_handler import DbManger
diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py
index f94a38157a7..203110f6a4c 100644
--- a/bot/modules/cancel_mirror.py
+++ b/bot/modules/cancel_mirror.py
@@ -45,6 +45,8 @@ def cancel_mirror(update, context):
sendMessage("Archival in Progress, You Can't Cancel It.", context.bot, update)
elif dl.status() == MirrorStatus.STATUS_EXTRACTING:
sendMessage("Extract in Progress, You Can't Cancel It.", context.bot, update)
+ elif dl.status() == MirrorStatus.STATUS_SPLITTING:
+ sendMessage("Split in Progress, You Can't Cancel It.", context.bot, update)
else:
dl.download().cancel_download()
sleep(3) # incase of any error with ondownloaderror listener
diff --git a/bot/modules/leech_settings.py b/bot/modules/leech_settings.py
new file mode 100644
index 00000000000..729a152e4de
--- /dev/null
+++ b/bot/modules/leech_settings.py
@@ -0,0 +1,124 @@
+import os
+import threading
+
+from PIL import Image
+from telegram.ext import CommandHandler, CallbackQueryHandler
+from telegram import InlineKeyboardMarkup
+
+from bot import AS_DOC_USERS, AS_MEDIA_USERS, dispatcher, AS_DOCUMENT, app, AUTO_DELETE_MESSAGE_DURATION
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, auto_delete_message
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper import button_build
+
+
+def leechSet(update, context):
+ user_id = update.message.from_user.id
+ path = f"Thumbnails/{user_id}.jpg"
+ msg = f"Leech Type for {user_id} user is "
+ if user_id in AS_DOC_USERS:
+ msg += "DOCUMENT"
+ elif user_id in AS_MEDIA_USERS:
+ msg += "MEDIA"
+ elif AS_DOCUMENT:
+ msg += "DOCUMENT"
+ elif not AS_DOCUMENT:
+ msg += "MEDIA"
+    msg += "\nCustom Thumbnail "
+ if os.path.exists(path):
+ msg += "exists"
+ else:
+ msg += "not exists"
+ buttons = button_build.ButtonMaker()
+ buttons.sbutton("As Document", f"doc {user_id}")
+ buttons.sbutton("As Media", f"med {user_id}")
+ buttons.sbutton("Delete Thumbnail", f"thumb {user_id}")
+ if AUTO_DELETE_MESSAGE_DURATION == -1:
+ buttons.sbutton("Close", f"closeset {user_id}")
+ button = InlineKeyboardMarkup(buttons.build_menu(2))
+ choose_msg = sendMarkup(msg, context.bot, update, button)
+ threading.Thread(target=auto_delete_message, args=(context.bot, update.message, choose_msg)).start()
+
+def setLeechType(update, context):
+ query = update.callback_query
+ user_id = query.from_user.id
+ data = query.data
+ data = data.split(" ")
+ if user_id != int(data[1]):
+ query.answer(text="Not Yours!", show_alert=True)
+ elif data[0] == "doc":
+ if user_id in AS_DOC_USERS:
+ query.answer(text="Already As Document!", show_alert=True)
+ query.message.delete()
+ elif user_id in AS_MEDIA_USERS:
+ AS_MEDIA_USERS.remove(user_id)
+ AS_DOC_USERS.add(user_id)
+ query.answer(text="Done!", show_alert=True)
+ query.message.delete()
+ elif AS_DOCUMENT:
+ query.answer(text="Already As Document!", show_alert=True)
+ query.message.delete()
+ elif not AS_DOCUMENT:
+ AS_DOC_USERS.add(user_id)
+ query.answer(text="Done!", show_alert=True)
+ query.message.delete()
+ elif data[0] == "med":
+ if user_id in AS_DOC_USERS:
+ AS_DOC_USERS.remove(user_id)
+ AS_MEDIA_USERS.add(user_id)
+ query.answer(text="Done!", show_alert=True)
+ query.message.delete()
+ elif user_id in AS_MEDIA_USERS:
+ query.answer(text="Already As Media!", show_alert=True)
+ query.message.delete()
+ elif AS_DOCUMENT:
+ AS_MEDIA_USERS.add(user_id)
+ query.answer(text="Done!", show_alert=True)
+ query.message.delete()
+ elif not AS_DOCUMENT:
+ query.answer(text="Already As Media!", show_alert=True)
+ query.message.delete()
+ elif data[0] == "thumb":
+ path = f"Thumbnails/{user_id}.jpg"
+ if os.path.lexists(path):
+ os.remove(path)
+ query.answer(text="Done!", show_alert=True)
+ else:
+ query.answer(text="No Thumbnail To Delete!", show_alert=True)
+ query.message.delete()
+ elif data[0] == "closeset":
+ query.message.delete()
+
+def setThumb(update, context):
+ user_id = update.message.from_user.id
+ reply_to = update.message.reply_to_message
+ if reply_to.photo:
+ path = "Thumbnails"
+ if not os.path.exists(path):
+ os.mkdir(path)
+ photo_msg = app.get_messages(update.message.chat.id, reply_to_message_ids=update.message.message_id)
+ photo_dir = app.download_media(photo_msg, file_name=path)
+ des_dir = os.path.join(path, str(user_id) + ".jpg")
+ # Image.open(photo_dir).convert("RGB").save(photo_dir)
+ img = Image.open(photo_dir)
+ w, h = img.size
+ img.thumbnail((320, h))
+ # img.resize((320, h))
+ img.save(des_dir, "JPEG")
+ os.remove(photo_dir)
+ sendMessage(f"Custom thumbnail saved for {user_id} user.", context.bot, update)
+ else:
+ sendMessage("Reply to a photo to save custom thumbnail.", context.bot, update)
+
+leech_set_handler = CommandHandler(BotCommands.LeechSetCommand, leechSet, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+set_thumbnail_handler = CommandHandler(BotCommands.SetThumbCommand, setThumb, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+as_doc_handler = CallbackQueryHandler(setLeechType, pattern="doc", run_async=True)
+as_media_handler = CallbackQueryHandler(setLeechType, pattern="med", run_async=True)
+del_thumb_handler = CallbackQueryHandler(setLeechType, pattern="thumb", run_async=True)
+close_set_handler = CallbackQueryHandler(setLeechType, pattern="closeset", run_async=True)
+dispatcher.add_handler(leech_set_handler)
+dispatcher.add_handler(as_doc_handler)
+dispatcher.add_handler(as_media_handler)
+dispatcher.add_handler(close_set_handler)
+dispatcher.add_handler(set_thumbnail_handler)
+dispatcher.add_handler(del_thumb_handler)
diff --git a/bot/modules/mirror.py b/bot/modules/mirror.py
index 7377dda7c84..109418ba80d 100644
--- a/bot/modules/mirror.py
+++ b/bot/modules/mirror.py
@@ -1,49 +1,56 @@
import requests
+import urllib
+import pathlib
+import os
+import subprocess
+import threading
+import re
+import random
+import string
+import time
+import shutil
+
from telegram.ext import CommandHandler
from telegram import InlineKeyboardMarkup
from fnmatch import fnmatch
-from bot import Interval, INDEX_URL, BUTTON_FOUR_NAME, BUTTON_FOUR_URL, BUTTON_FIVE_NAME, BUTTON_FIVE_URL, BUTTON_SIX_NAME, BUTTON_SIX_URL, BLOCK_MEGA_FOLDER, BLOCK_MEGA_LINKS, VIEW_LINK, aria2
-from bot import dispatcher, DOWNLOAD_DIR, download_dict, download_dict_lock, SHORTENER, SHORTENER_API, TAR_UNZIP_LIMIT
+from bot import Interval, INDEX_URL, BUTTON_FOUR_NAME, BUTTON_FOUR_URL, BUTTON_FIVE_NAME, BUTTON_FIVE_URL, \
+ BUTTON_SIX_NAME, BUTTON_SIX_URL, BLOCK_MEGA_FOLDER, BLOCK_MEGA_LINKS, VIEW_LINK, aria2, \
+ dispatcher, DOWNLOAD_DIR, download_dict, download_dict_lock, SHORTENER, SHORTENER_API, \
+ TAR_UNZIP_LIMIT, TG_SPLIT_SIZE, OWNER_ID
from bot.helper.ext_utils import fs_utils, bot_utils
from bot.helper.ext_utils.shortenurl import short_url
from bot.helper.ext_utils.exceptions import DirectDownloadLinkException, NotSupportedExtractionArchive
from bot.helper.mirror_utils.download_utils.aria2_download import AriaDownloadHelper
from bot.helper.mirror_utils.download_utils.mega_downloader import MegaDownloadHelper
-from bot.helper.mirror_utils.download_utils.qbit_downloader import qbittorrent
+from bot.helper.mirror_utils.download_utils.qbit_downloader import QbitTorrent
from bot.helper.mirror_utils.download_utils.direct_link_generator import direct_link_generator
from bot.helper.mirror_utils.download_utils.telegram_downloader import TelegramDownloadHelper
from bot.helper.mirror_utils.status_utils import listeners
from bot.helper.mirror_utils.status_utils.extract_status import ExtractStatus
from bot.helper.mirror_utils.status_utils.tar_status import TarStatus
+from bot.helper.mirror_utils.status_utils.split_status import SplitStatus
from bot.helper.mirror_utils.status_utils.upload_status import UploadStatus
+from bot.helper.mirror_utils.status_utils.tg_upload_status import TgUploadStatus
from bot.helper.mirror_utils.status_utils.gdownload_status import DownloadStatus
-from bot.helper.mirror_utils.upload_utils import gdriveTools
+from bot.helper.mirror_utils.upload_utils import gdriveTools, pyrogramEngine
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.message_utils import *
from bot.helper.telegram_helper import button_build
-import urllib
-import pathlib
-import os
-import subprocess
-import threading
-import re
-import random
-import string
-import time
-import shutil
ariaDlManager = AriaDownloadHelper()
ariaDlManager.start_listener()
+
class MirrorListener(listeners.MirrorListeners):
- def __init__(self, bot, update, pswd, isTar=False, extract=False, isZip=False, isQbit=False):
+ def __init__(self, bot, update, pswd, isTar=False, extract=False, isZip=False, isQbit=False, isLeech=False):
super().__init__(bot, update)
self.isTar = isTar
self.extract = extract
self.isZip = isZip
self.isQbit = isQbit
+ self.isLeech = isLeech
self.pswd = pswd
def onDownloadStarted(self):
@@ -130,14 +137,36 @@ def onDownloadComplete(self):
path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
up_name = pathlib.PurePath(path).name
up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
- LOGGER.info(f"Upload Name: {up_name}")
- drive = gdriveTools.GoogleDriveHelper(up_name, self)
size = fs_utils.get_path_size(up_path)
- upload_status = UploadStatus(drive, size, gid, self)
- with download_dict_lock:
- download_dict[self.uid] = upload_status
- update_all_messages()
- drive.upload(up_name)
+ if self.isLeech:
+ checked = False
+ for dirpath, subdir, files in os.walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
+ for file in files:
+ f_path = os.path.join(dirpath, file)
+ f_size = os.path.getsize(f_path)
+ if int(f_size) > TG_SPLIT_SIZE:
+ if not checked:
+ checked = True
+ with download_dict_lock:
+ download_dict[self.uid] = SplitStatus(up_name, up_path, size)
+ LOGGER.info(f"Splitting: {up_name}")
+ fs_utils.split(f_path, f_size, TG_SPLIT_SIZE)
+ os.remove(f_path)
+ LOGGER.info(f"Leech Name: {up_name}")
+ tg = pyrogramEngine.TgUploader(up_name, self)
+ tg_upload_status = TgUploadStatus(tg, size, gid, self)
+ with download_dict_lock:
+ download_dict[self.uid] = tg_upload_status
+ update_all_messages()
+ tg.upload()
+ else:
+ LOGGER.info(f"Upload Name: {up_name}")
+ drive = gdriveTools.GoogleDriveHelper(up_name, self)
+ upload_status = UploadStatus(drive, size, gid, self)
+ with download_dict_lock:
+ download_dict[self.uid] = upload_status
+ update_all_messages()
+ drive.upload(up_name)
def onDownloadError(self, error):
error = error.replace('<', ' ')
@@ -168,6 +197,44 @@ def onUploadProgress(self):
pass
def onUploadComplete(self, link: str, size, files, folders, typ):
+ if self.isLeech:
+ if self.message.from_user.username:
+ uname = f"@{self.message.from_user.username}"
+ else:
+ uname = f'{self.message.from_user.first_name}'
+ chat_id = str(self.message.chat.id)
+ count = len(files)
+ if OWNER_ID == int(chat_id):
+ msg = f'Name: {link}\n'
+ msg += f'Total Files: {count}'
+ sendMessage(msg, self.bot, self.update)
+ else:
+ chat_id = chat_id[4:]
+ msg = f"Name: {link}\n"
+ msg += f'Total Files: {count}\n'
+ msg += f'cc: {uname}\n\n'
+ fmsg = ''
+ for index, item in enumerate(list(files), start=1):
+ msg_id = files[item]
+ link = f"https://t.me/c/{chat_id}/{msg_id}"
+ fmsg += f"{index}. {item}\n"
+ if len(fmsg) > 3900:
+ sendMessage(msg + fmsg, self.bot, self.update)
+ fmsg = ''
+ if fmsg != '':
+ sendMessage(msg + fmsg, self.bot, self.update)
+ with download_dict_lock:
+ try:
+ fs_utils.clean_download(download_dict[self.uid].path())
+ except FileNotFoundError:
+ pass
+ del download_dict[self.uid]
+ count = len(download_dict)
+ if count == 0:
+ self.clean()
+ else:
+ update_all_messages()
+ return
with download_dict_lock:
msg = f'Filename: {download_dict[self.uid].name()}\nSize: {size}'
if os.path.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}'):
@@ -250,7 +317,7 @@ def onUploadError(self, error):
else:
update_all_messages()
-def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
+def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False, isLeech=False):
mesg = update.message.text.split('\n')
message_args = mesg[0].split(' ')
name_args = mesg[0].split('|')
@@ -311,7 +378,7 @@ def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
file.get_file().download(custom_path=f"{file_name}")
link = f"{file_name}"
elif file.mime_type != "application/x-bittorrent":
- listener = MirrorListener(bot, update, pswd, isTar, extract, isZip)
+ listener = MirrorListener(bot, update, pswd, isTar, extract, isZip, isLeech=isLeech)
tg_downloader = TelegramDownloadHelper(listener)
ms = update.message
tg_downloader.add_download(ms, f'{DOWNLOAD_DIR}{listener.uid}/', name)
@@ -343,10 +410,10 @@ def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
sendMessage(f"{e}", bot, update)
return
- listener = MirrorListener(bot, update, pswd, isTar, extract, isZip, isQbit)
+ listener = MirrorListener(bot, update, pswd, isTar, extract, isZip, isQbit, isLeech)
if bot_utils.is_gdrive_link(link):
- if not isTar and not extract:
+ if not isTar and not extract and not isLeech:
sendMessage(f"Use /{BotCommands.CloneCommand} to clone Google Drive file/folder\nUse /{BotCommands.TarMirrorCommand} to make tar of Google Drive folder\nUse /{BotCommands.UnzipMirrorCommand} to extracts archive Google Drive file", bot, update)
return
res, size, name, files = gdriveTools.GoogleDriveHelper().clonehelper(link)
@@ -357,7 +424,7 @@ def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
result = bot_utils.check_limit(size, TAR_UNZIP_LIMIT)
if result:
msg = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(size)}.'
- sendMessage(msg, listener.bot, listener.update)
+ sendMessage(msg, bot, update)
return
LOGGER.info(f"Download Name : {name}")
drive = gdriveTools.GoogleDriveHelper(name, listener)
@@ -380,7 +447,7 @@ def _mirror(bot, update, isTar=False, extract=False, isZip=False, isQbit=False):
mega_dl.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener)
elif isQbit and (bot_utils.is_magnet(link) or os.path.exists(link)):
- qbit = qbittorrent()
+ qbit = QbitTorrent()
qbit.add_torrent(link, f'{DOWNLOAD_DIR}{listener.uid}/', listener, qbitsel)
else:
@@ -412,6 +479,30 @@ def qb_unzip_mirror(update, context):
def qb_zip_mirror(update, context):
_mirror(context.bot, update, True, isZip=True, isQbit=True)
+def leech(update, context):
+ _mirror(context.bot, update, isLeech=True)
+
+def tar_leech(update, context):
+ _mirror(context.bot, update, True, isLeech=True)
+
+def unzip_leech(update, context):
+ _mirror(context.bot, update, extract=True, isLeech=True)
+
+def zip_leech(update, context):
+ _mirror(context.bot, update, True, isZip=True, isLeech=True)
+
+def qb_leech(update, context):
+ _mirror(context.bot, update, isQbit=True, isLeech=True)
+
+def qb_tar_leech(update, context):
+ _mirror(context.bot, update, True, isQbit=True, isLeech=True)
+
+def qb_unzip_leech(update, context):
+ _mirror(context.bot, update, extract=True, isQbit=True, isLeech=True)
+
+def qb_zip_leech(update, context):
+ _mirror(context.bot, update, True, isZip=True, isQbit=True, isLeech=True)
+
mirror_handler = CommandHandler(BotCommands.MirrorCommand, mirror,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
tar_mirror_handler = CommandHandler(BotCommands.TarMirrorCommand, tar_mirror,
@@ -428,6 +519,22 @@ def qb_zip_mirror(update, context):
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
qb_zip_mirror_handler = CommandHandler(BotCommands.QbZipMirrorCommand, qb_zip_mirror,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+leech_handler = CommandHandler(BotCommands.LeechCommand, leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+tar_leech_handler = CommandHandler(BotCommands.TarLeechCommand, tar_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+unzip_leech_handler = CommandHandler(BotCommands.UnzipLeechCommand, unzip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+zip_leech_handler = CommandHandler(BotCommands.ZipLeechCommand, zip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+qb_leech_handler = CommandHandler(BotCommands.QbLeechCommand, qb_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+qb_tar_leech_handler = CommandHandler(BotCommands.QbTarLeechCommand, qb_tar_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+qb_unzip_leech_handler = CommandHandler(BotCommands.QbUnzipLeechCommand, qb_unzip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+qb_zip_leech_handler = CommandHandler(BotCommands.QbZipLeechCommand, qb_zip_leech,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
dispatcher.add_handler(mirror_handler)
dispatcher.add_handler(tar_mirror_handler)
dispatcher.add_handler(unzip_mirror_handler)
@@ -436,3 +543,11 @@ def qb_zip_mirror(update, context):
dispatcher.add_handler(qb_tar_mirror_handler)
dispatcher.add_handler(qb_unzip_mirror_handler)
dispatcher.add_handler(qb_zip_mirror_handler)
+dispatcher.add_handler(leech_handler)
+dispatcher.add_handler(tar_leech_handler)
+dispatcher.add_handler(unzip_leech_handler)
+dispatcher.add_handler(zip_leech_handler)
+dispatcher.add_handler(qb_leech_handler)
+dispatcher.add_handler(qb_tar_leech_handler)
+dispatcher.add_handler(qb_unzip_leech_handler)
+dispatcher.add_handler(qb_zip_leech_handler)
diff --git a/bot/modules/watch.py b/bot/modules/watch.py
index d38bd267452..fe80d2117ae 100644
--- a/bot/modules/watch.py
+++ b/bot/modules/watch.py
@@ -9,7 +9,7 @@
import threading
-def _watch(bot: Bot, update, isTar=False, isZip=False):
+def _watch(bot: Bot, update, isTar=False, isZip=False, isLeech=False):
mssg = update.message.text
message_args = mssg.split(' ')
name_args = mssg.split('|')
@@ -44,11 +44,13 @@ def _watch(bot: Bot, update, isTar=False, isZip=False):
name = ""
pswd = ""
- listener = MirrorListener(bot, update, pswd, isTar, isZip=isZip)
+ listener = MirrorListener(bot, update, pswd, isTar, isZip=isZip, isLeech=isLeech)
ydl = YoutubeDLHelper(listener)
threading.Thread(target=ydl.add_download,args=(link, f'{DOWNLOAD_DIR}{listener.uid}', qual, name)).start()
sendStatusMessage(update, bot)
+def watch(update, context):
+ _watch(context.bot, update)
def watchTar(update, context):
_watch(context.bot, update, True)
@@ -56,18 +58,31 @@ def watchTar(update, context):
def watchZip(update, context):
_watch(context.bot, update, True, True)
-def watch(update, context):
- _watch(context.bot, update)
+def leechWatch(update, context):
+ _watch(context.bot, update, isLeech=True)
+
+def leechWatchTar(update, context):
+ _watch(context.bot, update, True, isLeech=True)
+def leechWatchZip(update, context):
+ _watch(context.bot, update, True, True, True)
-mirror_handler = CommandHandler(BotCommands.WatchCommand, watch,
+watch_handler = CommandHandler(BotCommands.WatchCommand, watch,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
-tar_mirror_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar,
+tar_watch_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
-zip_mirror_handler = CommandHandler(BotCommands.ZipWatchCommand, watchZip,
+zip_watch_handler = CommandHandler(BotCommands.ZipWatchCommand, watchZip,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+leech_watch_handler = CommandHandler(BotCommands.LeechWatchCommand, leechWatch,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+leech_tar_watch_handler = CommandHandler(BotCommands.LeechTarWatchCommand, leechWatchTar,
+ filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+leech_zip_watch_handler = CommandHandler(BotCommands.LeechZipWatchCommand, leechWatchZip,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
-
-dispatcher.add_handler(mirror_handler)
-dispatcher.add_handler(tar_mirror_handler)
-dispatcher.add_handler(zip_mirror_handler)
+dispatcher.add_handler(watch_handler)
+dispatcher.add_handler(tar_watch_handler)
+dispatcher.add_handler(zip_watch_handler)
+dispatcher.add_handler(leech_watch_handler)
+dispatcher.add_handler(leech_tar_watch_handler)
+dispatcher.add_handler(leech_zip_watch_handler)
diff --git a/config_sample.env b/config_sample.env
index bf09cd7ea70..d105d8d1f61 100644
--- a/config_sample.env
+++ b/config_sample.env
@@ -6,7 +6,7 @@ BOT_TOKEN = ""
GDRIVE_FOLDER_ID = ""
OWNER_ID =
DOWNLOAD_DIR = "/usr/src/app/downloads"
-DOWNLOAD_STATUS_UPDATE_INTERVAL = 5
+DOWNLOAD_STATUS_UPDATE_INTERVAL = 8
AUTO_DELETE_MESSAGE_DURATION = 20
IS_TEAM_DRIVE = ""
TELEGRAM_API =
@@ -19,6 +19,8 @@ IGNORE_PENDING_REQUESTS = ""
USE_SERVICE_ACCOUNTS = ""
INDEX_URL = ""
STATUS_LIMIT = "" # Recommend limit status to 4 tasks
+TG_SPLIT_SIZE = "" # leave it empty for max size(2GB)
+AS_DOCUMENT = ""
UPTOBOX_TOKEN = ""
MEGA_API_KEY = ""
MEGA_EMAIL_ID = ""
diff --git a/requirements.txt b/requirements.txt
index f0dace548a1..4a547401d25 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,13 +6,16 @@ attrdict
beautifulsoup4
cloudscrape
feedparser
+filesplit
google-api-python-client
google-auth-httplib2
google-auth-oauthlib
gunicorn
+hachoir
js2py
lk21
lxml
+Pillow
psutil
psycopg2-binary
pybase64