From bfa9292d521b8016dd01d25df0a8a3b27dc48fa9 Mon Sep 17 00:00:00 2001
From: Sourcery AI <>
Date: Thu, 12 May 2022 18:59:26 +0000
Subject: [PATCH] 'Refactored by Sourcery'
---
add_to_team_drive.py | 2 +-
bot/__init__.py | 43 ++++-------
bot/__main__.py | 11 ++-
bot/helper/ext_utils/db_handler.py | 77 +++++++++----------
bot/helper/ext_utils/fs_utils.py | 6 +-
.../download_utils/aria2_download.py | 65 ++++++++--------
.../download_utils/direct_link_generator.py | 27 ++++---
.../download_utils/gd_downloader.py | 2 +-
.../download_utils/mega_download.py | 12 ++-
.../download_utils/mega_downloader.py | 4 +-
.../download_utils/qbit_downloader.py | 2 +-
.../download_utils/telegram_downloader.py | 6 +-
.../youtube_dl_download_helper.py | 20 ++---
.../status_utils/qbit_download_status.py | 7 +-
.../mirror_utils/upload_utils/gdriveTools.py | 45 +++++------
.../upload_utils/pyrogramEngine.py | 6 +-
bot/helper/telegram_helper/filters.py | 23 +++---
bot/modules/authorize.py | 44 ++---------
bot/modules/cancel_mirror.py | 16 ++--
bot/modules/clone.py | 15 ++--
bot/modules/leech_settings.py | 2 +-
bot/modules/mirror.py | 18 +++--
bot/modules/mirror_status.py | 3 +-
bot/modules/rss.py | 6 +-
bot/modules/search.py | 27 +++----
gen_sa_accounts.py | 74 ++++++++++++------
nodes.py | 21 +++--
wserver.py | 6 --
28 files changed, 279 insertions(+), 311 deletions(-)
diff --git a/add_to_team_drive.py b/add_to_team_drive.py
index 087c5a5f4a..c9ef4baa78 100644
--- a/add_to_team_drive.py
+++ b/add_to_team_drive.py
@@ -57,7 +57,7 @@
drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
batch = drive.new_batch_http_request()
-aa = glob.glob('%s/*.json' % acc_dir)
+aa = glob.glob(f'{acc_dir}/*.json')
pbar = progress.bar.Bar("Readying accounts", max=len(aa))
for i in aa:
ce = json.loads(open(i, 'r').read())['client_email']
diff --git a/bot/__init__.py b/bot/__init__.py
index d059cb8765..36b918f842 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -176,8 +176,6 @@ def get_client():
MIRROR_LOGS.add(int(chats))
except:
logging.warning('Logs Chat Details not provided!')
- pass
-
if ospath.exists("link_logs.txt"):
with open("link_logs.txt", "r+") as f:
lines = f.readlines()
@@ -191,8 +189,6 @@ def get_client():
LINK_LOGS.add(int(chats))
except:
logging.warning('LINK_LOGS Chat id not provided, Proceeding Without it')
- pass
-
if ospath.exists("logs_chat.txt"):
with open("logs_chat.txt", "r+") as f:
lines = f.readlines()
@@ -213,8 +209,6 @@ def get_client():
LEECH_LOG.add(int(chats))
except:
logging.warning('Leech Log Channel ID not Provided!')
- pass
-
try:
achats = getConfig("LEECH_LOG_ALT")
achats = achats.split(" ")
@@ -222,13 +216,12 @@ def get_client():
LEECH_LOG_ALT.add(int(chats))
except:
logging.warning('Leech Log alt Channel ID not Provided!')
- pass
try:
BOT_TOKEN = getConfig('BOT_TOKEN')
parent_id = getConfig('GDRIVE_FOLDER_ID')
DOWNLOAD_DIR = getConfig('DOWNLOAD_DIR')
if not DOWNLOAD_DIR.endswith("/"):
- DOWNLOAD_DIR = DOWNLOAD_DIR + '/'
+ DOWNLOAD_DIR = f'{DOWNLOAD_DIR}/'
DOWNLOAD_STATUS_UPDATE_INTERVAL = int(getConfig('DOWNLOAD_STATUS_UPDATE_INTERVAL'))
OWNER_ID = int(getConfig('OWNER_ID'))
AUTO_DELETE_MESSAGE_DURATION = int(getConfig('AUTO_DELETE_MESSAGE_DURATION'))
@@ -267,7 +260,6 @@ def aria2c_init():
aria2.remove([download], force=True, files=True)
except Exception as e:
logging.error(f"Aria2c initializing error: {e}")
- pass
if not ospath.isfile(".restartmsg"):
sleep(1)
@@ -306,7 +298,7 @@ def aria2c_init():
except KeyError:
MEGA_API_KEY = None
LOGGER.info("MEGA API KEY NOT AVAILABLE")
-if MEGAREST is True:
+if MEGAREST:
# Start megasdkrest binary
Popen(["megasdkrest", "--apikey", MEGA_API_KEY])
sleep(3) # Wait for the mega server to start listening
@@ -583,7 +575,7 @@ def aria2c_init():
FSUB = FSUB.lower() == 'true'
except KeyError:
FSUB = False
-
+
try:
FSUB_CHANNEL_ID = int(getConfig('FSUB_CHANNEL_ID'))
except KeyError:
@@ -591,7 +583,7 @@ def aria2c_init():
try:
CHANNEL_USERNAME: str = getConfig('CHANNEL_USERNAME').replace("@", "")
- if len(CHANNEL_USERNAME) == 0:
+ if not CHANNEL_USERNAME:
CHANNEL_USERNAME = 'heliosmirror'
except KeyError:
logging.warning('CHANNEL_USERNAME not provided')
@@ -622,20 +614,19 @@ def aria2c_init():
ACCOUNTS_ZIP_URL = getConfig('ACCOUNTS_ZIP_URL')
if len(ACCOUNTS_ZIP_URL) == 0:
raise KeyError
- else:
- try:
- res = rget(ACCOUNTS_ZIP_URL)
- if res.status_code == 200:
- with open('accounts.zip', 'wb+') as f:
- f.write(res.content)
- else:
- logging.error(f"Failed to download accounts.zip, link got HTTP response: {res.status_code}")
- except Exception as e:
- logging.error(f"ACCOUNTS_ZIP_URL: {e}")
- raise KeyError
- srun(["unzip", "-q", "-o", "accounts.zip"])
- srun(["chmod", "-R", "777", "accounts"])
- osremove("accounts.zip")
+ try:
+ res = rget(ACCOUNTS_ZIP_URL)
+ if res.status_code == 200:
+ with open('accounts.zip', 'wb+') as f:
+ f.write(res.content)
+ else:
+ logging.error(f"Failed to download accounts.zip, link got HTTP response: {res.status_code}")
+ except Exception as e:
+ logging.error(f"ACCOUNTS_ZIP_URL: {e}")
+ raise KeyError
+ srun(["unzip", "-q", "-o", "accounts.zip"])
+ srun(["chmod", "-R", "777", "accounts"])
+ osremove("accounts.zip")
except KeyError:
pass
try:
diff --git a/bot/__main__.py b/bot/__main__.py
index 3c18ed8187..6991cbb5ad 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -75,14 +75,13 @@ def start(update, context):
else:
if BOT_PM:
message = sendMessage(f'Dear {uname},\n\nIf You Want To Use Me, You Have To Join @{CHANNEL_USERNAME}\n\nNOTE: All The Uploaded Links and Leeched Files By You Will Be Sent Here In Your Private Chat From Now.', context.bot, update)
- Thread(target=auto_delete_message, args=(context.bot, update.message, message)).start()
- return
else:
message = sendMarkup(
f'Dear {uname},\n\nIf You Want To Use Me, You Have To Join @{CHANNEL_USERNAME}\n\n',
context.bot, update, reply_markup)
- Thread(target=auto_delete_message, args=(context.bot, update.message, message)).start()
- return
+
+ Thread(target=auto_delete_message, args=(context.bot, update.message, message)).start()
+ return
def restart(update, context):
restart_message = sendMessage("Restarting...", context.bot, update)
if Interval:
@@ -287,9 +286,9 @@ def main():
osremove(".restartmsg")
elif OWNER_ID:
try:
- text = "Bot Restarted!"
- message = bot.sendMessage(chat_id=OWNER_ID, text=text, parse_mode=ParseMode.HTML)
if AUTHORIZED_CHATS:
+ text = "Bot Restarted!"
+ message = bot.sendMessage(chat_id=OWNER_ID, text=text, parse_mode=ParseMode.HTML)
for i in AUTHORIZED_CHATS:
bot.sendMessage(chat_id=i, text=text, parse_mode=ParseMode.HTML)
except Exception as e:
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
index 9aba4dfab4..546d048f75 100644
--- a/bot/helper/ext_utils/db_handler.py
+++ b/bot/helper/ext_utils/db_handler.py
@@ -53,8 +53,7 @@ def db_init(self):
def db_load(self):
# User Data
self.cur.execute("SELECT * from users")
- rows = self.cur.fetchall() #returns a list ==> (uid, sudo, auth, media, doc, thumb)
- if rows:
+ if rows := self.cur.fetchall():
for row in rows:
if row[1] and row[0] not in SUDO_USERS:
SUDO_USERS.add(row[0])
@@ -80,8 +79,7 @@ def db_load(self):
LOGGER.info("Users data has been imported from Database")
# Rss Data
self.cur.execute("SELECT * FROM rss")
- rows = self.cur.fetchall() #returns a list ==> (name, feed_link, last_link, last_title, filters)
- if rows:
+ if rows := self.cur.fetchall():
for row in rows:
f_lists = []
if row[4] is not None:
@@ -97,9 +95,9 @@ def user_auth(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif not self.user_check(chat_id):
- sql = 'INSERT INTO users (uid, auth) VALUES ({}, TRUE)'.format(chat_id)
+ sql = f'INSERT INTO users (uid, auth) VALUES ({chat_id}, TRUE)'
else:
- sql = 'UPDATE users SET auth = TRUE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET auth = TRUE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -109,7 +107,7 @@ def user_unauth(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif self.user_check(chat_id):
- sql = 'UPDATE users SET auth = FALSE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET auth = FALSE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -120,9 +118,9 @@ def addleech_log(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif not self.user_check(chat_id):
- sql = 'INSERT INTO users (uid, leechlog) VALUES ({}, TRUE)'.format(chat_id)
+ sql = f'INSERT INTO users (uid, leechlog) VALUES ({chat_id}, TRUE)'
else:
- sql = 'UPDATE users SET leechlog = TRUE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET leechlog = TRUE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -132,7 +130,7 @@ def rmleech_log(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif self.user_check(chat_id):
- sql = 'UPDATE users SET leechlog = FALSE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET leechlog = FALSE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -143,9 +141,9 @@ def addleech_log_alt(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif not self.user_check(chat_id):
- sql = 'INSERT INTO users (uid, leechlogalt) VALUES ({}, TRUE)'.format(chat_id)
+ sql = f'INSERT INTO users (uid, leechlogalt) VALUES ({chat_id}, TRUE)'
else:
- sql = 'UPDATE users SET leechlogalt = TRUE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET leechlogalt = TRUE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -155,7 +153,7 @@ def rmleech_log_alt(self, chat_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif self.user_check(chat_id):
- sql = 'UPDATE users SET leechlogalt = FALSE WHERE uid = {}'.format(chat_id)
+ sql = f'UPDATE users SET leechlogalt = FALSE WHERE uid = {chat_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -165,9 +163,9 @@ def user_addsudo(self, user_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif not self.user_check(user_id):
- sql = 'INSERT INTO users (uid, sudo) VALUES ({}, TRUE)'.format(user_id)
+ sql = f'INSERT INTO users (uid, sudo) VALUES ({user_id}, TRUE)'
else:
- sql = 'UPDATE users SET sudo = TRUE WHERE uid = {}'.format(user_id)
+ sql = f'UPDATE users SET sudo = TRUE WHERE uid = {user_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -177,19 +175,19 @@ def user_rmsudo(self, user_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif self.user_check(user_id):
- sql = 'UPDATE users SET sudo = FALSE WHERE uid = {}'.format(user_id)
- self.cur.execute(sql)
- self.conn.commit()
- self.disconnect()
- return 'Successfully removed from Sudo'
+ sql = f'UPDATE users SET sudo = FALSE WHERE uid = {user_id}'
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Successfully removed from Sudo'
def user_media(self, user_id: int):
if self.err:
return
elif not self.user_check(user_id):
- sql = 'INSERT INTO users (uid, media) VALUES ({}, TRUE)'.format(user_id)
+ sql = f'INSERT INTO users (uid, media) VALUES ({user_id}, TRUE)'
else:
- sql = 'UPDATE users SET media = TRUE, doc = FALSE WHERE uid = {}'.format(user_id)
+ sql = f'UPDATE users SET media = TRUE, doc = FALSE WHERE uid = {user_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -198,9 +196,9 @@ def user_doc(self, user_id: int):
if self.err:
return
elif not self.user_check(user_id):
- sql = 'INSERT INTO users (uid, doc) VALUES ({}, TRUE)'.format(user_id)
+ sql = f'INSERT INTO users (uid, doc) VALUES ({user_id}, TRUE)'
else:
- sql = 'UPDATE users SET media = FALSE, doc = TRUE WHERE uid = {}'.format(user_id)
+ sql = f'UPDATE users SET media = FALSE, doc = TRUE WHERE uid = {user_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -210,10 +208,12 @@ def user_save_thumb(self, user_id: int, path):
return
image = open(path, 'rb+')
image_bin = image.read()
- if not self.user_check(user_id):
- sql = 'INSERT INTO users (thumb, uid) VALUES (%s, %s)'
- else:
- sql = 'UPDATE users SET thumb = %s WHERE uid = %s'
+ sql = (
+ 'UPDATE users SET thumb = %s WHERE uid = %s'
+ if self.user_check(user_id)
+ else 'INSERT INTO users (thumb, uid) VALUES (%s, %s)'
+ )
+
self.cur.execute(sql, (image_bin, user_id))
self.conn.commit()
self.disconnect()
@@ -222,15 +222,14 @@ def user_rm_thumb(self, user_id: int, path):
if self.err:
return
elif self.user_check(user_id):
- sql = 'UPDATE users SET thumb = NULL WHERE uid = {}'.format(user_id)
+ sql = f'UPDATE users SET thumb = NULL WHERE uid = {user_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
def user_check(self, uid: int):
- self.cur.execute("SELECT * FROM users WHERE uid = {}".format(uid))
- res = self.cur.fetchone()
- return res
+ self.cur.execute(f"SELECT * FROM users WHERE uid = {uid}")
+ return self.cur.fetchone()
def rss_add(self, name, link, last, title, filters):
if self.err:
@@ -266,9 +265,9 @@ def user_addmod(self, user_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif not self.user_check(user_id):
- sql = 'INSERT INTO users (uid, mod) VALUES ({}, TRUE)'.format(user_id)
+ sql = f'INSERT INTO users (uid, mod) VALUES ({user_id}, TRUE)'
else:
- sql = 'UPDATE users SET mod = TRUE WHERE uid = {}'.format(user_id)
+ sql = f'UPDATE users SET mod = TRUE WHERE uid = {user_id}'
self.cur.execute(sql)
self.conn.commit()
self.disconnect()
@@ -278,11 +277,11 @@ def user_rmmod(self, user_id: int):
if self.err:
return "Error in DB connection, check log for details"
elif self.user_check(user_id):
- sql = 'UPDATE users SET mod = FALSE WHERE uid = {}'.format(user_id)
- self.cur.execute(sql)
- self.conn.commit()
- self.disconnect()
- return 'Successfully removed from Moderator'
+ sql = f'UPDATE users SET mod = FALSE WHERE uid = {user_id}'
+ self.cur.execute(sql)
+ self.conn.commit()
+ self.disconnect()
+ return 'Successfully removed from Moderator'
if DB_URI is not None:
DbManger().db_init()
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 3a116b1b18..1dbfd28c10 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -178,8 +178,8 @@ def split(path, size, file_, dirpath, split_size, start_time=0, i=1, inLoop=Fals
if file_.upper().endswith(VIDEO_SUFFIXES):
base_name, extension = ospath.splitext(file_)
split_size = split_size - 2500000
- while i <= parts :
- parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(3), str(extension))
+ while i <= parts:
+ parted_name = f"{str(base_name)}.part{str(i).zfill(3)}{str(extension)}"
out_path = ospath.join(dirpath, parted_name)
run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-i",
path, "-ss", str(start_time), "-fs", str(split_size),
@@ -197,7 +197,7 @@ def split(path, size, file_, dirpath, split_size, start_time=0, i=1, inLoop=Fals
start_time += lpd - 3
i = i + 1
else:
- out_path = ospath.join(dirpath, file_ + ".")
+ out_path = ospath.join(dirpath, f"{file_}.")
run(["split", "--numeric-suffixes=1", "--suffix-length=3", f"--bytes={split_size}", path, out_path])
def get_media_info(path):
diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py
index 14e8ea1852..b12bfec0c8 100644
--- a/bot/helper/mirror_utils/download_utils/aria2_download.py
+++ b/bot/helper/mirror_utils/download_utils/aria2_download.py
@@ -16,37 +16,37 @@ def __onDownloadStarted(api, gid):
sleep(1.5)
dl = getDownloadByGid(gid)
download = api.get_download(gid)
- if STOP_DUPLICATE and dl is not None and not dl.getListener().isLeech:
- LOGGER.info('Checking File/Folder if already in Drive...')
- sname = download.name
- if dl.getListener().isZip:
- sname = sname + ".zip"
- elif dl.getListener().extract:
- try:
- sname = get_base_name(sname)
- except:
- sname = None
- if sname is not None:
- smsg, button = GoogleDriveHelper().drive_list(sname, True)
- if smsg:
- dl.getListener().onDownloadError('File/Folder already available in Drive.\n\n')
- api.remove([download], force=True, files=True)
- return sendMarkup("Here are the search results:", dl.getListener().bot, dl.getListener().update, button)
- if dl is not None and (ZIP_UNZIP_LIMIT is not None or TORRENT_DIRECT_LIMIT is not None):
- sleep(1)
- limit = None
- if ZIP_UNZIP_LIMIT is not None and (dl.getListener().isZip or dl.getListener().extract):
- mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
- limit = ZIP_UNZIP_LIMIT
- elif TORRENT_DIRECT_LIMIT is not None:
- mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}GB'
- limit = TORRENT_DIRECT_LIMIT
- if limit is not None:
- LOGGER.info('Checking File/Folder Size...')
- size = api.get_download(gid).total_length
- if size > limit * 1024**3:
- dl.getListener().onDownloadError(f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}')
- return api.remove([download], force=True, files=True)
+ if STOP_DUPLICATE and dl is not None and not dl.getListener().isLeech:
+ LOGGER.info('Checking File/Folder if already in Drive...')
+ sname = download.name
+ if dl.getListener().isZip:
+ sname = f"{sname}.zip"
+ elif dl.getListener().extract:
+ try:
+ sname = get_base_name(sname)
+ except:
+ sname = None
+ if sname is not None:
+ smsg, button = GoogleDriveHelper().drive_list(sname, True)
+ if smsg:
+ dl.getListener().onDownloadError('File/Folder already available in Drive.\n\n')
+ api.remove([download], force=True, files=True)
+ return sendMarkup("Here are the search results:", dl.getListener().bot, dl.getListener().update, button)
+ if dl is not None and (ZIP_UNZIP_LIMIT is not None or TORRENT_DIRECT_LIMIT is not None):
+ sleep(1)
+ limit = None
+ if ZIP_UNZIP_LIMIT is not None and (dl.getListener().isZip or dl.getListener().extract):
+ mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
+ limit = ZIP_UNZIP_LIMIT
+ elif TORRENT_DIRECT_LIMIT is not None:
+ mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}GB'
+ limit = TORRENT_DIRECT_LIMIT
+ if limit is not None:
+ LOGGER.info('Checking File/Folder Size...')
+ size = api.get_download(gid).total_length
+ if size > limit * 1024**3:
+ dl.getListener().onDownloadError(f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}')
+ return api.remove([download], force=True, files=True)
except:
LOGGER.error(f"onDownloadStart: {gid} stop duplicate and size check didn't pass")
@@ -70,8 +70,7 @@ def __onDownloadComplete(api, gid):
@new_thread
def __onDownloadStopped(api, gid):
sleep(4)
- dl = getDownloadByGid(gid)
- if dl:
+ if dl := getDownloadByGid(gid):
dl.getListener().onDownloadError('Dead torrent!')
@new_thread
diff --git a/bot/helper/mirror_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_utils/download_utils/direct_link_generator.py
index 074bc01120..95f443299f 100644
--- a/bot/helper/mirror_utils/download_utils/direct_link_generator.py
+++ b/bot/helper/mirror_utils/download_utils/direct_link_generator.py
@@ -143,7 +143,8 @@ def uptobox(url: str) -> str:
dl_url = link
except:
file_id = re.findall(r'\bhttps?://.*uptobox\.com/(\w+)', url)[0]
- file_link = 'https://uptobox.com/api/link?token=%s&file_code=%s' % (UPTOBOX_TOKEN, file_id)
+ file_link = f'https://uptobox.com/api/link?token={UPTOBOX_TOKEN}&file_code={file_id}'
+
req = requests.get(file_link)
result = req.json()
dl_url = result['data']['dlLink']
@@ -254,7 +255,9 @@ def pixeldrain(url: str) -> str:
if resp["success"]:
return dl_link
else:
- raise DirectDownloadLinkException("ERROR: Cant't download due {}.".format(resp["message"]))
+ raise DirectDownloadLinkException(
+ f"""ERROR: Cant't download due {resp["message"]}."""
+ )
def antfiles(url: str) -> str:
""" Antfiles direct link generator
@@ -320,11 +323,12 @@ def fichier(link: str) -> str:
elif len(soup.find_all("div", {"class": "ct_warn"})) == 2:
str_2 = soup.find_all("div", {"class": "ct_warn"})[-1]
if "you must wait" in str(str_2).lower():
- numbers = [int(word) for word in str(str_2).split() if word.isdigit()]
- if not numbers:
- raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hour.")
- else:
+ if numbers := [
+ int(word) for word in str(str_2).split() if word.isdigit()
+ ]:
raise DirectDownloadLinkException(f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.")
+ else:
+ raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hour.")
elif "protect access" in str(str_2).lower():
raise DirectDownloadLinkException(f"ERROR: This link requires a password!\n\nThis link requires a password!\n- Insert sign :: after the link and write the password after the sign.\n\nExample:\n/{BotCommands.MirrorCommand} https://1fichier.com/?smmtd8twfpm66awbqz04::love you
\n\n* No spaces between the signs ::\n* For the password, you can use a space!")
else:
@@ -333,11 +337,12 @@ def fichier(link: str) -> str:
str_1 = soup.find_all("div", {"class": "ct_warn"})[-2]
str_3 = soup.find_all("div", {"class": "ct_warn"})[-1]
if "you must wait" in str(str_1).lower():
- numbers = [int(word) for word in str(str_1).split() if word.isdigit()]
- if not numbers:
- raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hour.")
- else:
+ if numbers := [
+ int(word) for word in str(str_1).split() if word.isdigit()
+ ]:
raise DirectDownloadLinkException(f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.")
+ else:
+ raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hour.")
elif "bad password" in str(str_3).lower():
raise DirectDownloadLinkException("ERROR: The password you entered is wrong!")
else:
@@ -353,7 +358,7 @@ def solidfiles(url: str) -> str:
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36'
}
pageSource = requests.get(url, headers = headers).text
- mainOptions = str(re.search(r'viewerOptions\'\,\ (.*?)\)\;', pageSource).group(1))
+ mainOptions = str(re.search(r'viewerOptions\'\,\ (.*?)\)\;', pageSource)[1])
return jsnloads(mainOptions)["downloadUrl"]
def krakenfiles(page_link: str) -> str:
diff --git a/bot/helper/mirror_utils/download_utils/gd_downloader.py b/bot/helper/mirror_utils/download_utils/gd_downloader.py
index 553e164fa9..35e6e7c8d3 100644
--- a/bot/helper/mirror_utils/download_utils/gd_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/gd_downloader.py
@@ -16,7 +16,7 @@ def add_gd_download(link, listener, is_gdtot):
if STOP_DUPLICATE and not listener.isLeech:
LOGGER.info('Checking File/Folder if already in Drive...')
if listener.isZip:
- gname = name + ".zip"
+ gname = f"{name}.zip"
elif listener.extract:
try:
gname = get_base_name(name)
diff --git a/bot/helper/mirror_utils/download_utils/mega_download.py b/bot/helper/mirror_utils/download_utils/mega_download.py
index 461dae37d9..38e81341e7 100644
--- a/bot/helper/mirror_utils/download_utils/mega_download.py
+++ b/bot/helper/mirror_utils/download_utils/mega_download.py
@@ -71,9 +71,15 @@ def __onDownloadStart(self, name, size, gid):
def __onInterval(self):
dlInfo = self.__mega_client.getDownloadInfo(self.gid)
- if (dlInfo['state'] == constants.State.TYPE_STATE_COMPLETED or dlInfo[
- 'state'] == constants.State.TYPE_STATE_CANCELED or dlInfo[
- 'state'] == constants.State.TYPE_STATE_FAILED) and self.__periodic is not None:
+ if (
+ dlInfo['state']
+ in [
+ constants.State.TYPE_STATE_COMPLETED,
+ constants.State.TYPE_STATE_CANCELED,
+ constants.State.TYPE_STATE_FAILED,
+ ]
+ and self.__periodic is not None
+ ):
self.__periodic.cancel()
if dlInfo['state'] == constants.State.TYPE_STATE_COMPLETED:
self.__onDownloadComplete()
diff --git a/bot/helper/mirror_utils/download_utils/mega_downloader.py b/bot/helper/mirror_utils/download_utils/mega_downloader.py
index 6811d8c0f2..397360555b 100644
--- a/bot/helper/mirror_utils/download_utils/mega_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/mega_downloader.py
@@ -76,7 +76,7 @@ def onRequestTemporaryError(self, api, request, error: MegaError):
LOGGER.error(f'Mega Request error in {error}')
if not self.is_cancelled:
self.is_cancelled = True
- self.listener.onDownloadError("RequestTempError: " + error.toString())
+ self.listener.onDownloadError(f"RequestTempError: {error.toString()}")
self.error = error.toString()
self.continue_event.set()
@@ -157,7 +157,7 @@ def add_mega_download(mega_link: str, path: str, listener):
LOGGER.info('Checking File/Folder if already in Drive')
mname = node.getName()
if listener.isZip:
- mname = mname + ".zip"
+ mname = f"{mname}.zip"
elif listener.extract:
try:
mname = get_base_name(mname)
diff --git a/bot/helper/mirror_utils/download_utils/qbit_downloader.py b/bot/helper/mirror_utils/download_utils/qbit_downloader.py
index 54f19facf3..d785f30238 100644
--- a/bot/helper/mirror_utils/download_utils/qbit_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/qbit_downloader.py
@@ -146,7 +146,7 @@ def _qb_listener(listener, client, ext_hash, select, path):
if qbname.endswith('.!qB'):
qbname = ospath.splitext(qbname)[0]
if listener.isZip:
- qbname = qbname + ".zip"
+ qbname = f"{qbname}.zip"
elif listener.extract:
try:
qbname = get_base_name(qbname)
diff --git a/bot/helper/mirror_utils/download_utils/telegram_downloader.py b/bot/helper/mirror_utils/download_utils/telegram_downloader.py
index c8f7428c9d..479f893844 100644
--- a/bot/helper/mirror_utils/download_utils/telegram_downloader.py
+++ b/bot/helper/mirror_utils/download_utils/telegram_downloader.py
@@ -84,12 +84,8 @@ def __download(self, message, path):
def add_download(self, message, path, filename):
_message = app.get_messages(message.chat.id, reply_to_message_ids=message.message_id)
- media = None
media_array = [_message.document, _message.video, _message.audio]
- for i in media_array:
- if i is not None:
- media = i
- break
+ media = next((i for i in media_array if i is not None), None)
if media is not None:
with global_lock:
# For avoiding locking the thread lock for long time unnecessarily
diff --git a/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py b/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py
index f4dc085507..93209b99db 100644
--- a/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py
+++ b/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py
@@ -137,10 +137,7 @@ def extractMetaData(self, link, name, get_info=False):
ext = realName.split('.')[-1]
if name == "":
newname = str(realName).split(f" [{result['id'].replace('*', '_')}]")
- if len(newname) > 1:
- self.name = newname[0] + '.' + ext
- else:
- self.name = newname[0]
+ self.name = newname[0] + '.' + ext if len(newname) > 1 else newname[0]
else:
self.name = f"{name}.{ext}"
@@ -169,20 +166,19 @@ def add_download(self, link, path, name, qual, playlist):
if qual.startswith('ba/b'):
audio_info = qual.split('-')
qual = audio_info[0]
- if len(audio_info) == 2:
- rate = audio_info[1]
- else:
- rate = 320
+ rate = audio_info[1] if len(audio_info) == 2 else 320
self.opts['postprocessors'] = [{'key': 'FFmpegExtractAudio','preferredcodec': 'mp3','preferredquality': f'{rate}'}]
self.opts['format'] = qual
LOGGER.info(f"Downloading with YT-DLP: {link}")
self.extractMetaData(link, name)
if self.__is_cancelled:
return
- if not self.is_playlist:
- self.opts['outtmpl'] = f"{path}/{self.name}"
- else:
- self.opts['outtmpl'] = f"{path}/{self.name}/%(title)s.%(ext)s"
+ self.opts['outtmpl'] = (
+ f"{path}/{self.name}/%(title)s.%(ext)s"
+ if self.is_playlist
+ else f"{path}/{self.name}"
+ )
+
self.__download(link)
def cancel_download(self):
diff --git a/bot/helper/mirror_utils/status_utils/qbit_download_status.py b/bot/helper/mirror_utils/status_utils/qbit_download_status.py
index e9505d45d2..29f8a62698 100644
--- a/bot/helper/mirror_utils/status_utils/qbit_download_status.py
+++ b/bot/helper/mirror_utils/status_utils/qbit_download_status.py
@@ -33,10 +33,7 @@ def size_raw(self):
Gets total size of the mirror file/folder
:return: total size of mirror
"""
- if self.__select:
- return self.__info.size
- else:
- return self.__info.total_size
+ return self.__info.size if self.__select else self.__info.total_size
def processed_bytes(self):
return self.__info.downloaded
@@ -62,7 +59,7 @@ def status(self):
if download in ["queuedDL", "queuedUP"]:
return MirrorStatus.STATUS_WAITING
elif download in ["metaDL", "checkingResumeData"]:
- return MirrorStatus.STATUS_DOWNLOADING + " (Metadata)"
+ return f"{MirrorStatus.STATUS_DOWNLOADING} (Metadata)"
elif download in ["pausedDL", "pausedUP"]:
return MirrorStatus.STATUS_PAUSE
elif download in ["checkingUP", "checkingDL"]:
diff --git a/bot/helper/mirror_utils/upload_utils/gdriveTools.py b/bot/helper/mirror_utils/upload_utils/gdriveTools.py
index 933ef2c64c..c2ad798ebc 100644
--- a/bot/helper/mirror_utils/upload_utils/gdriveTools.py
+++ b/bot/helper/mirror_utils/upload_utils/gdriveTools.py
@@ -197,9 +197,7 @@ def __upload_file(self, file_path, file_name, mime_type, parent_id):
drive_file = self.__service.files().create(supportsTeamDrives=True,
body=file_metadata, media_body=media_body)
response = None
- while response is None:
- if self.is_cancelled:
- break
+ while response is None and not self.is_cancelled:
try:
self.status, response = drive_file.next_chunk()
except HttpError as err:
@@ -235,7 +233,7 @@ def upload(self, file_name: str):
file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}"
file_path = f"{file_dir}/{file_name}"
size = get_readable_file_size(get_path_size(file_path))
- LOGGER.info("Uploading File: " + file_path)
+ LOGGER.info(f"Uploading File: {file_path}")
self.updater = setInterval(self.update_interval, self._on_upload_progress)
try:
if ospath.isfile(file_path):
@@ -249,7 +247,7 @@ def upload(self, file_name: str):
return
if link is None:
raise Exception('Upload has been manually cancelled')
- LOGGER.info("Uploaded To G-Drive: " + file_path)
+ LOGGER.info(f"Uploaded To G-Drive: {file_path}")
else:
mime_type = 'Folder'
dir_id = self.__create_directory(ospath.basename(ospath.abspath(file_name)), parent_id)
@@ -259,7 +257,7 @@ def upload(self, file_name: str):
link = f"https://drive.google.com/folderview?id={dir_id}"
if self.is_cancelled:
return
- LOGGER.info("Uploaded To G-Drive: " + file_name)
+ LOGGER.info(f"Uploaded To G-Drive: {file_name}")
except Exception as e:
if isinstance(e, RetryError):
LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
@@ -398,16 +396,16 @@ def clone(self, link):
if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
buttons.buildbutton(f"{BUTTON_FIVE_NAME}", f"{BUTTON_FIVE_URL}")
if SOURCE_LINK is True:
- buttons.buildbutton(f"🔗 Source Link", link)
+ buttons.buildbutton("🔗 Source Link", link)
except Exception as err:
if isinstance(err, RetryError):
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
LOGGER.error(err)
- if "User rate limit exceeded" in str(err):
+ if "User rate limit exceeded" in err:
msg = "User rate limit exceeded."
- elif "File not found" in str(err):
+ elif "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@@ -655,15 +653,13 @@ def drive_list(self, fileName, stopDup=False, noMulti=False, isRecursive=True, i
if token_service is not None:
self.__service = token_service
for index, parent_id in enumerate(DRIVES_IDS):
- if isRecursive and len(parent_id) > 23:
- isRecur = False
- else:
- isRecur = isRecursive
+ isRecur = False if isRecursive and len(parent_id) > 23 else isRecursive
response = self.__drive_query(parent_id, fileName, stopDup, isRecur, itemType)
- if not response["files"] and noMulti:
- break
- elif not response["files"]:
- continue
+ if not response["files"]:
+ if noMulti:
+ break
+ else:
+ continue
if not Title:
msg += f'{name}'
if mime_type is None:
@@ -768,14 +763,14 @@ def count(self, link):
self.__gDrive_file(meta)
msg += f'\n\nSize: {get_readable_file_size(self.__total_bytes)}'
msg += f'\n\nType: {mime_type}'
- msg += f'\nFiles: {self.__total_files}'
+ msg += f'\nFiles: {self.__total_files}'
except Exception as err:
if isinstance(err, RetryError):
LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
LOGGER.error(err)
- if "File not found" in str(err):
+ if "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@@ -832,7 +827,7 @@ def helper(self, link):
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
LOGGER.error(err)
- if "File not found" in str(err):
+ if "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@@ -861,9 +856,9 @@ def download(self, link):
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
LOGGER.error(err)
- if "downloadQuotaExceeded" in str(err):
+ if "downloadQuotaExceeded" in err:
err = "Download Quota Exceeded."
- elif "File not found" in str(err):
+ elif "File not found" in err:
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
@@ -881,7 +876,7 @@ def __download_folder(self, folder_id, path, folder_name):
folder_name = folder_name.replace('/', '')
if not ospath.exists(path + folder_name):
makedirs(path + folder_name)
- path += folder_name + '/'
+ path += f'{folder_name}/'
result = self.__getFilesByFolderId(folder_id)
if len(result) == 0:
return
@@ -907,7 +902,7 @@ def __download_folder(self, folder_id, path, folder_name):
def __download_file(self, file_id, path, filename, mime_type):
request = self.__service.files().get_media(fileId=file_id)
filename = filename.replace('/', '')
- fh = FileIO('{}{}'.format(path, filename), 'wb')
+ fh = FileIO(f'{path}{filename}', 'wb')
downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024)
done = False
while not done:
diff --git a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
index 7249a392ed..6dbd9af45c 100644
--- a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
+++ b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py
@@ -99,7 +99,7 @@ def __upload_file(self, up_path, file_, dirpath):
else:
width, height = get_video_resolution(up_path)
if not file_.upper().endswith(("MKV", "MP4")):
- file_ = ospath.splitext(file_)[0] + '.mp4'
+ file_ = f'{ospath.splitext(file_)[0]}.mp4'
new_path = ospath.join(dirpath, file_)
osrename(up_path, new_path)
up_path = new_path
@@ -178,12 +178,8 @@ def __upload_file(self, up_path, file_, dirpath):
LOGGER.warning("Image Leech is Blocked by Owner")
else:
LOGGER.warning("Image Leech is Blocked by Owner")
- pass
-
elif file_.upper().endswith(TEXT_SUFFIXES):
LOGGER.warning("Useless Text/Html file found, Not Uploading")
- pass
-
else:
notMedia = True
if self.__as_doc or notMedia:
diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py
index 5a6e6b2c36..a5abba23a5 100644
--- a/bot/helper/telegram_helper/filters.py
+++ b/bot/helper/telegram_helper/filters.py
@@ -6,32 +6,32 @@
class CustomFilters:
class _OwnerFilter(MessageFilter):
def filter(self, message):
- return bool(message.from_user.id == OWNER_ID)
+ return message.from_user.id == OWNER_ID
owner_filter = _OwnerFilter()
class _AuthorizedUserFilter(MessageFilter):
def filter(self, message):
id = message.from_user.id
- return bool(id in AUTHORIZED_CHATS or id in SUDO_USERS or id == OWNER_ID)
+ return id in AUTHORIZED_CHATS or id in SUDO_USERS or id == OWNER_ID
authorized_user = _AuthorizedUserFilter()
class _AuthorizedChat(MessageFilter):
def filter(self, message):
- return bool(message.chat.id in AUTHORIZED_CHATS)
+ return message.chat.id in AUTHORIZED_CHATS
authorized_chat = _AuthorizedChat()
class _SudoUser(MessageFilter):
def filter(self, message):
- return bool(message.from_user.id in SUDO_USERS)
+ return message.from_user.id in SUDO_USERS
sudo_user = _SudoUser()
class _ModUser(MessageFilter):
def filter(self, message):
- return bool(message.from_user.id in MOD_USERS)
+ return message.from_user.id in MOD_USERS
mod_user = _ModUser()
@@ -44,14 +44,15 @@ def filter(self, message: Message):
if len(args) > 1:
# Cancelling by gid
with download_dict_lock:
- for message_id, status in download_dict.items():
- if status.gid() == args[1] and status.message.from_user.id == user_id:
- return True
- else:
- return False
+ return any(
+ status.gid() == args[1]
+ and status.message.from_user.id == user_id
+ for message_id, status in download_dict.items()
+ )
+
elif not message.reply_to_message:
return True
# Cancelling by replying to original mirror message
reply_user = message.reply_to_message.from_user.id
- return bool(reply_user == user_id)
+ return reply_user == user_id
mirror_owner_filter = _MirrorOwner()
diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py
index e3ea8a43ea..2ff8f03d05 100644
--- a/bot/modules/authorize.py
+++ b/bot/modules/authorize.py
@@ -115,11 +115,7 @@ def addleechlog(update, context):
file.write(f'{user_id}\n')
msg = 'User added in Leech Logs'
elif reply_message is None:
- # Trying to add a chat in leech logs
- if len(message_) == 2:
- chat_id = int(message_[1])
- else:
- chat_id = update.effective_chat.id
+ chat_id = update.effective_chat.id
if chat_id in LEECH_LOG:
msg = 'Chat Already exist in Leech Logs!'
elif DB_URI is not None:
@@ -162,11 +158,7 @@ def rmleechlog(update, context):
else:
msg = 'User does not exist in leech logs'
elif reply_message is None:
- # Trying to remove a chat from leech log
- if len(message_) == 2:
- chat_id = int(message_[1])
- else:
- chat_id = update.effective_chat.id
+ chat_id = update.effective_chat.id
if chat_id in LEECH_LOG:
if DB_URI is not None:
msg = DbManger().rmleech_log(chat_id)
@@ -213,11 +205,7 @@ def addleechlog_alt(update, context):
file.write(f'{user_id}\n')
msg = 'User added in Leech Logs'
elif reply_message is None:
- # Trying to add a chat in leech logs
- if len(message_) == 2:
- chat_id = int(message_[1])
- else:
- chat_id = update.effective_chat.id
+ chat_id = update.effective_chat.id
if chat_id in LEECH_LOG_ALT:
msg = 'Chat Already exist in Leech Logs!'
elif DB_URI is not None:
@@ -260,11 +248,7 @@ def rmleechlog_alt(update, context):
else:
msg = 'User does not exist in leech logs'
elif reply_message is None:
- # Trying to remove a chat from leech log
- if len(message_) == 2:
- chat_id = int(message_[1])
- else:
- chat_id = update.effective_chat.id
+ chat_id = update.effective_chat.id
if chat_id in LEECH_LOG_ALT:
if DB_URI is not None:
msg = DbManger().rmleech_log_alt(chat_id)
@@ -333,10 +317,7 @@ def removeSudo(update, context):
if len(message_) == 2:
user_id = int(message_[1])
if user_id in SUDO_USERS:
- if DB_URI is not None:
- msg = DbManger().user_rmsudo(user_id)
- else:
- msg = 'Demoted'
+ msg = DbManger().user_rmsudo(user_id) if DB_URI is not None else 'Demoted'
SUDO_USERS.remove(user_id)
else:
msg = 'Not sudo user to demote!'
@@ -345,10 +326,7 @@ def removeSudo(update, context):
else:
user_id = reply_message.from_user.id
if user_id in SUDO_USERS:
- if DB_URI is not None:
- msg = DbManger().user_rmsudo(user_id)
- else:
- msg = 'Demoted'
+ msg = DbManger().user_rmsudo(user_id) if DB_URI is not None else 'Demoted'
SUDO_USERS.remove(user_id)
else:
msg = 'Not sudo user to demote!'
@@ -401,10 +379,7 @@ def removeMod(update, context):
if len(message_) == 2:
user_id = int(message_[1])
if user_id in MOD_USERS:
- if DB_URI is not None:
- msg = DbManger().user_rmmod(user_id)
- else:
- msg = 'Demoted'
+ msg = DbManger().user_rmmod(user_id) if DB_URI is not None else 'Demoted'
MOD_USERS.remove(user_id)
else:
msg = 'Not Moderator to demote!'
@@ -413,10 +388,7 @@ def removeMod(update, context):
else:
user_id = reply_message.from_user.id
if user_id in MOD_USERS:
- if DB_URI is not None:
- msg = DbManger().user_rmmod(user_id)
- else:
- msg = 'Demoted'
+ msg = DbManger().user_rmmod(user_id) if DB_URI is not None else 'Demoted'
MOD_USERS.remove(user_id)
else:
msg = 'Not Moderator to demote!'
diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py
index e7525034d2..cb987d4c19 100644
--- a/bot/modules/cancel_mirror.py
+++ b/bot/modules/cancel_mirror.py
@@ -45,16 +45,12 @@ def cancel_mirror(update, context):
def cancel_all(update, context):
count = 0
gid = 0
- while True:
- dl = getAllDownload()
- if dl:
- if dl.gid() != gid:
- gid = dl.gid()
- dl.download().cancel_download()
- count += 1
- sleep(0.3)
- else:
- break
+ while dl := getAllDownload():
+ if dl.gid() != gid:
+ gid = dl.gid()
+ dl.download().cancel_download()
+ count += 1
+ sleep(0.3)
sendMessage(f'{count} Download(s) has been Cancelled!', context.bot, update)
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
index 7e4267af1f..d7da104fce 100644
--- a/bot/modules/clone.py
+++ b/bot/modules/clone.py
@@ -131,13 +131,14 @@ def cloneNode(update, context):
cc = f'\n\n#Cloned By: {tag}'
if button in ["cancelled", ""]:
sendMessage(f"{tag} {result}", context.bot, update)
- else:
- if AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
- auto_delete_message = int(AUTO_DELETE_UPLOAD_MESSAGE_DURATION / 60)
- if update.message.chat.type == 'private':
- warnmsg = ''
- else:
- warnmsg = f'\nThis message will be deleted in {auto_delete_message} minutes from this group.\n'
+ elif AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
+ auto_delete_message = int(AUTO_DELETE_UPLOAD_MESSAGE_DURATION / 60)
+ warnmsg = (
+ ''
+ if update.message.chat.type == 'private'
+ else f'\nThis message will be deleted in {auto_delete_message} minutes from this group.\n'
+ )
+
if BOT_PM and update.message.chat.type != 'private':
pmwarn = f"\nI have sent links in PM.\n"
elif update.message.chat.type == 'private':
diff --git a/bot/modules/leech_settings.py b/bot/modules/leech_settings.py
index 1e2b5e4410..8df583072e 100644
--- a/bot/modules/leech_settings.py
+++ b/bot/modules/leech_settings.py
@@ -105,7 +105,7 @@ def setThumb(update, context):
mkdir(path)
photo_msg = app.get_messages(update.message.chat.id, reply_to_message_ids=update.message.message_id)
photo_dir = app.download_media(photo_msg, file_name=path)
- des_dir = ospath.join(path, str(user_id) + ".jpg")
+ des_dir = ospath.join(path, f"{str(user_id)}.jpg")
Image.open(photo_dir).convert("RGB").save(des_dir, "JPEG")
osremove(photo_dir)
if DB_URI is not None:
diff --git a/bot/modules/mirror.py b/bot/modules/mirror.py
index b316e6b3a0..113d558342 100644
--- a/bot/modules/mirror.py
+++ b/bot/modules/mirror.py
@@ -84,16 +84,16 @@ def onDownloadComplete(self):
with download_dict_lock:
download_dict[self.uid] = ZipStatus(name, m_path, size)
pswd = self.pswd
- path = m_path + ".zip"
+ path = f"{m_path}.zip"
LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
if pswd is not None:
if self.isLeech and int(size) > TG_SPLIT_SIZE:
- path = m_path + ".zip"
+ path = f"{m_path}.zip"
srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
else:
srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
elif self.isLeech and int(size) > TG_SPLIT_SIZE:
- path = m_path + ".zip"
+ path = f"{m_path}.zip"
srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
else:
srun(["7z", "a", "-mx=0", path, m_path])
@@ -249,10 +249,12 @@ def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
except:
pass
auto_delete_message = int(AUTO_DELETE_UPLOAD_MESSAGE_DURATION / 60)
- if self.message.chat.type == 'private':
- warnmsg = ''
- else:
- warnmsg = f'\nThis message will be deleted in {auto_delete_message} minutes from this group.\n'
+ warnmsg = (
+ ''
+ if self.message.chat.type == 'private'
+ else f'\nThis message will be deleted in {auto_delete_message} minutes from this group.\n'
+ )
+
else:
warnmsg = ''
if BOT_PM and self.message.chat.type != 'private':
@@ -264,7 +266,6 @@ def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
else:
pmwarn = ''
pmwarn_mirror = ''
- logwarn = f"\nI have sent files in Log Channel.\n"
if self.isLeech:
count = len(files)
msg += f'\nTotal Files: {count}'
@@ -301,6 +302,7 @@ def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
else:
fmsg = '\n\n'
+ logwarn = f"\nI have sent files in Log Channel.\n"
for index, item in enumerate(list(files), start=1):
msg_id = files[item]
link = f"https://t.me/c/{chat_id}/{msg_id}"
diff --git a/bot/modules/mirror_status.py b/bot/modules/mirror_status.py
index 4ddd2f5912..78474d0ae8 100644
--- a/bot/modules/mirror_status.py
+++ b/bot/modules/mirror_status.py
@@ -35,8 +35,7 @@ def status_pages(update, context):
data = query.data
data = data.split(' ')
query.answer()
- done = turn(data)
- if done:
+ if done := turn(data):
update_all_messages()
else:
query.message.delete()
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index fd569e7c2b..adcb4df5ee 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -168,7 +168,11 @@ def rss_monitor(context):
break
parse = True
for list in data[3]:
- if not any(x in str(rss_d.entries[feed_count]['title']).lower() for x in list):
+ if all(
+ x
+ not in str(rss_d.entries[feed_count]['title']).lower()
+ for x in list
+ ):
parse = False
feed_count += 1
break
diff --git a/bot/modules/search.py b/bot/modules/search.py
index 74d96a6672..419a8e83c9 100644
--- a/bot/modules/search.py
+++ b/bot/modules/search.py
@@ -19,8 +19,7 @@
if SEARCH_PLUGINS is not None:
PLUGINS = []
qbclient = get_client()
- qb_plugins = qbclient.search_plugins()
- if qb_plugins:
+ if qb_plugins := qbclient.search_plugins():
for plugin in qb_plugins:
qbclient.search_uninstall_plugin(names=plugin['name'])
qbclient.search_install_plugin(SEARCH_PLUGINS)
@@ -63,10 +62,10 @@ def torser(update, context):
buttons.sbutton("Cancel", f"torser {user_id} cancel")
button = InlineKeyboardMarkup(buttons.build_menu(2))
sendMarkup('Choose tool to search:', context.bot, update, button)
- elif SEARCH_API_LINK is not None and SEARCH_PLUGINS is None:
+ elif SEARCH_API_LINK is not None:
button = _api_buttons(user_id)
sendMarkup('Choose site to search:', context.bot, update, button)
- elif SEARCH_API_LINK is None and SEARCH_PLUGINS is not None:
+ elif SEARCH_PLUGINS is not None:
button = _plugin_buttons(user_id)
sendMarkup('Choose site to search:', context.bot, update, button)
else:
@@ -111,11 +110,10 @@ def _search(key, site, message, tool):
search_results = resp.json()
if site == "all":
search_results = list(itertools.chain.from_iterable(search_results))
- if isinstance(search_results, list):
- msg = f"Found {min(len(search_results), SEARCH_LIMIT)}"
- msg += f" result for {key}\nTorrent Site:- {SITES.get(site)}"
- else:
+ if not isinstance(search_results, list):
return editMessage(f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", message)
+ msg = f"Found {min(len(search_results), SEARCH_LIMIT)}"
+ msg += f" result for {key}\nTorrent Site:- {SITES.get(site)}"
except Exception as e:
editMessage(str(e), message)
else:
@@ -130,11 +128,10 @@ def _search(key, site, message, tool):
dict_search_results = client.search_results(search_id=search_id)
search_results = dict_search_results.results
total_results = dict_search_results.total
- if total_results != 0:
- msg = f"Found {min(total_results, SEARCH_LIMIT)}"
- msg += f" result for {key}\nTorrent Site:- {site.capitalize()}"
- else:
+ if total_results == 0:
return editMessage(f"No result found for {key}\nTorrent Site:- {site.capitalize()}", message)
+ msg = f"Found {min(total_results, SEARCH_LIMIT)}"
+ msg += f" result for {key}\nTorrent Site:- {site.capitalize()}"
link = _getResult(search_results, key, message, tool)
buttons = button_build.ButtonMaker()
buttons.buildbutton("🔎 Click Here to View Results", link)
@@ -228,8 +225,7 @@ def _api_buttons(user_id):
for data, name in SITES.items():
buttons.sbutton(name, f"torser {user_id} {data} api")
buttons.sbutton("Cancel", f"torser {user_id} cancel")
- button = InlineKeyboardMarkup(buttons.build_menu(2))
- return button
+ return InlineKeyboardMarkup(buttons.build_menu(2))
def _plugin_buttons(user_id):
buttons = button_build.ButtonMaker()
@@ -243,8 +239,7 @@ def _plugin_buttons(user_id):
buttons.sbutton(siteName.capitalize(), f"torser {user_id} {siteName} plugin")
buttons.sbutton('All', f"torser {user_id} all plugin")
buttons.sbutton("Cancel", f"torser {user_id} cancel")
- button = InlineKeyboardMarkup(buttons.build_menu(2))
- return button
+ return InlineKeyboardMarkup(buttons.build_menu(2))
torser_handler = CommandHandler(BotCommands.TorrentSearchCommand, torser, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
index 7f1d7e08a5..2c8690d770 100644
--- a/gen_sa_accounts.py
+++ b/gen_sa_accounts.py
@@ -26,15 +26,24 @@ def _create_accounts(service, project, count):
batch = service.new_batch_http_request(callback=_def_batch_resp)
for _ in range(count):
aid = _generate_id('mfc-')
- batch.add(service.projects().serviceAccounts().create(name='projects/' + project, body={'accountId': aid,
- 'serviceAccount': {
- 'displayName': aid}}))
+ batch.add(
+ service.projects()
+ .serviceAccounts()
+ .create(
+ name=f'projects/{project}',
+ body={
+ 'accountId': aid,
+ 'serviceAccount': {'displayName': aid},
+ },
+ )
+ )
+
batch.execute()
# Create accounts needed to fill project
def _create_remaining_accounts(iam, project):
- print('Creating accounts in %s' % project)
+ print(f'Creating accounts in {project}')
sa_count = len(_list_sas(iam, project))
while sa_count != 100:
_create_accounts(iam, project, 100 - sa_count)
@@ -58,14 +67,14 @@ def _def_batch_resp(id, resp, exception):
if str(exception).startswith('