diff --git a/app/__init__.py b/app/__init__.py
index a58e7fc..24c1034 100755
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -6,7 +6,6 @@ load_dotenv("config.env")
from .config import Config
from .core.client import BOT
-
if not os.environ.get("TERMUX_APK_RELEASE"):
import uvloop
diff --git a/app/api/instagram.py b/app/api/instagram.py
index 26e3f13..9119ebc 100755
--- a/app/api/instagram.py
+++ b/app/api/instagram.py
@@ -5,16 +5,14 @@ from app import Config
from app.core import aiohttp_tools
from app.core.scraper_config import ScraperConfig
-API_KEYS = { "KEYS": Config.API_KEYS , "counter": 0 }
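+# API keys plus a rotation counter; get_key() cycles through the available keys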
+API_KEYS = {"KEYS": Config.API_KEYS, "counter": 0}
+
class Instagram(ScraperConfig):
def __init__(self, url):
super().__init__()
shortcode = os.path.basename(urlparse(url).path.rstrip("/"))
- self.url = (
- f"https://www.instagram.com/graphql/query?query_hash=2b0673e0dc4580674a88d426fe00ea90&variables=%7B%22shortcode%22%3A%22{shortcode}%22%7D"
- )
- self.set_sauce(url)
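+        # GraphQL media-query URL built from the post's shortcode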
+ self.url = f"https://www.instagram.com/graphql/query?query_hash=2b0673e0dc4580674a88d426fe00ea90&variables=%7B%22shortcode%22%3A%22{shortcode}%22%7D"
async def download_or_extract(self):
for func in [self.no_api_dl, self.api_dl]:
@@ -24,17 +22,31 @@ class Instagram(ScraperConfig):
async def no_api_dl(self):
response = await aiohttp_tools.get_json(url=self.url)
- if not response or "data" not in response or not response["data"]["shortcode_media"]:
+ if (
+ not response
+ or "data" not in response
+ or not response["data"]["shortcode_media"]
+ ):
return
return await self.parse_ghraphql(response["data"]["shortcode_media"])
-
async def api_dl(self):
if not Config.API_KEYS:
return
- param = {"api_key": await self.get_key(), "url": self.url, "proxy": "residential", "js": False}
- response = await aiohttp_tools.get_json(url="https://api.webscraping.ai/html", timeout=30, params=param)
- if not response or "data" not in response or not response["data"]["shortcode_media"]:
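+        # Fallback path: fetch the GraphQL URL through the webscraping.ai API over a residential proxy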
+ param = {
+ "api_key": await self.get_key(),
+ "url": self.url,
+ "proxy": "residential",
+ "js": False,
+ }
+ response = await aiohttp_tools.get_json(
+ url="https://api.webscraping.ai/html", timeout=30, params=param
+ )
+ if (
+ not response
+ or "data" not in response
+ or not response["data"]["shortcode_media"]
+ ):
return
self.caption = ".."
return await self.parse_ghraphql(response["data"]["shortcode_media"])
@@ -44,7 +56,10 @@ class Instagram(ScraperConfig):
if not type_check:
return
elif type_check == "GraphSidecar":
- self.link = [i["node"].get("video_url") or i["node"].get("display_url") for i in json_["edge_sidecar_to_children"]["edges"]]
+ self.link = [
+ i["node"].get("video_url") or i["node"].get("display_url")
+ for i in json_["edge_sidecar_to_children"]["edges"]
+ ]
self.group = True
else:
if link := json_.get("video_url"):
@@ -64,4 +79,4 @@ class Instagram(ScraperConfig):
count = 0
ret_key = keys[count]
API_KEYS["counter"] = count
- return ret_key
\ No newline at end of file
+ return ret_key
diff --git a/app/api/tiktok_scraper.py b/app/api/tiktok_scraper.py
index 66dd82a..e5a11c9 100644
--- a/app/api/tiktok_scraper.py
+++ b/app/api/tiktok_scraper.py
@@ -56,7 +56,10 @@ class Scraper:
if self.config["Scraper"]["Use_different_protocols"] == "False":
self.proxies = {"all": self.config["Scraper"]["All"]}
else:
- self.proxies = {"http": self.config["Scraper"]["Http_proxy"], "https": self.config["Scraper"]["Https_proxy"]}
+ self.proxies = {
+ "http": self.config["Scraper"]["Http_proxy"],
+ "https": self.config["Scraper"]["Https_proxy"],
+ }
else:
self.proxies = None
# 配置文件不存在则不使用代理/If the configuration file does not exist, do not use
@@ -75,7 +78,10 @@ class Scraper:
try:
# 从输入文字中提取索引链接存入列表/Extract index links from input text and store in
# list
- url = re.findall("http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+", text)
+ url = re.findall(
+ "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+",
+ text,
+ )
# 判断是否有链接/Check if there is a link
if len(url) > 0:
return url[0]
@@ -88,7 +94,9 @@ class Scraper:
@staticmethod
def generate_x_bogus_url(url: str, headers: dict) -> str:
query = urllib.parse.urlparse(url).query
- xbogus = execjs.compile(open("./X-Bogus.js").read()).call("sign", query, headers["User-Agent"])
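+        # Sign the query string with X-Bogus.js, passing the request User-Agent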
+ xbogus = execjs.compile(open("./X-Bogus.js").read()).call(
+ "sign", query, headers["User-Agent"]
+ )
new_url = url + "&X-Bogus=" + xbogus
return new_url
@@ -127,7 +135,13 @@ class Scraper:
print("正在通过抖音分享链接获取原始链接...")
try:
async with aiohttp.ClientSession() as session:
- async with session.get(url, headers=self.headers, proxy=self.proxies, allow_redirects=False, timeout=10) as response:
+ async with session.get(
+ url,
+ headers=self.headers,
+ proxy=self.proxies,
+ allow_redirects=False,
+ timeout=10,
+ ) as response:
if response.status == 302:
url = (
response.headers["Location"].split("?")[0]
@@ -165,7 +179,13 @@ class Scraper:
print("正在通过TikTok分享链接获取原始链接...")
try:
async with aiohttp.ClientSession() as session:
- async with session.get(url, headers=self.headers, proxy=self.proxies, allow_redirects=False, timeout=10) as response:
+ async with session.get(
+ url,
+ headers=self.headers,
+ proxy=self.proxies,
+ allow_redirects=False,
+ timeout=10,
+ ) as response:
if response.status == 301:
url = (
response.headers["Location"].split("?")[0]
@@ -209,7 +229,9 @@ class Scraper:
"""
        # 调用JavaScript函数/Call the JavaScript function
query = urllib.parse.urlparse(url).query
- xbogus = execjs.compile(open("./X-Bogus.js").read()).call("sign", query, self.headers["User-Agent"])
+ xbogus = execjs.compile(open("./X-Bogus.js").read()).call(
+ "sign", query, self.headers["User-Agent"]
+ )
if not quiet_mode:
print("生成的X-Bogus签名为: {}".format(xbogus))
new_url = url + "&X-Bogus=" + xbogus
@@ -275,7 +297,12 @@ class Scraper:
if not quiet_mode:
print("正在获取视频数据API: {}".format(api_url))
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=self.douyin_api_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url,
+ headers=self.douyin_api_headers,
+ proxy=self.proxies,
+ timeout=10,
+ ) as response:
response = await response.json()
# 获取视频数据/Get video data
video_data = response["aweme_detail"]
@@ -297,12 +324,19 @@ class Scraper:
print("正在获取抖音视频数据...")
try:
# 构造访问链接/Construct the access link
- api_url = f"https://live.douyin.com/webcast/web/enter/?aid=6383&web_rid={web_rid}"
+ api_url = (
+ f"https://live.douyin.com/webcast/web/enter/?aid=6383&web_rid={web_rid}"
+ )
# 访问API/Access API
if not quiet_mode:
print("正在获取视频数据API: {}".format(api_url))
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=self.douyin_api_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url,
+ headers=self.douyin_api_headers,
+ proxy=self.proxies,
+ timeout=10,
+ ) as response:
response = await response.json()
# 获取视频数据/Get video data
video_data = response["data"]
@@ -319,12 +353,16 @@ class Scraper:
# 获取单个抖音视频数据/Get single Douyin video data
@retry(stop=stop_after_attempt(4), wait=wait_fixed(7))
- async def get_douyin_user_profile_videos(self, profile_url: str, tikhub_token: str) -> Union[dict, None]:
+ async def get_douyin_user_profile_videos(
+ self, profile_url: str, tikhub_token: str
+ ) -> Union[dict, None]:
try:
api_url = f"https://api.tikhub.io/douyin_profile_videos/?douyin_profile_url={profile_url}&cursor=0&count=20"
_headers = {"Authorization": f"Bearer {tikhub_token}"}
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url, headers=_headers, proxy=self.proxies, timeout=10
+ ) as response:
response = await response.json()
return response
except Exception as e:
@@ -335,12 +373,16 @@ class Scraper:
# 获取抖音主页点赞视频数据/Get Douyin profile like video data
@retry(stop=stop_after_attempt(4), wait=wait_fixed(7))
- async def get_douyin_profile_liked_data(self, profile_url: str, tikhub_token: str) -> Union[dict, None]:
+ async def get_douyin_profile_liked_data(
+ self, profile_url: str, tikhub_token: str
+ ) -> Union[dict, None]:
try:
api_url = f"https://api.tikhub.io/douyin_profile_liked_videos/?douyin_profile_url={profile_url}&cursor=0&count=20"
_headers = {"Authorization": f"Bearer {tikhub_token}"}
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url, headers=_headers, proxy=self.proxies, timeout=10
+ ) as response:
response = await response.json()
return response
except Exception as e:
@@ -351,12 +393,16 @@ class Scraper:
# 获取抖音视频评论数据/Get Douyin video comment data
@retry(stop=stop_after_attempt(4), wait=wait_fixed(7))
- async def get_douyin_video_comments(self, video_url: str, tikhub_token: str) -> Union[dict, None]:
+ async def get_douyin_video_comments(
+ self, video_url: str, tikhub_token: str
+ ) -> Union[dict, None]:
try:
api_url = f"https://api.tikhub.io/douyin_video_comments/?douyin_video_url={video_url}&cursor=0&count=20"
_headers = {"Authorization": f"Bearer {tikhub_token}"}
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url, headers=_headers, proxy=self.proxies, timeout=10
+ ) as response:
response = await response.json()
return response
except Exception as e:
@@ -406,7 +452,12 @@ class Scraper:
if not quiet_mode:
print("正在获取视频数据API: {}".format(api_url))
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=self.tiktok_api_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url,
+ headers=self.tiktok_api_headers,
+ proxy=self.proxies,
+ timeout=10,
+ ) as response:
response = await response.json()
video_data = response["aweme_list"][0]
if not quiet_mode:
@@ -419,12 +470,16 @@ class Scraper:
raise e
@retry(stop=stop_after_attempt(4), wait=wait_fixed(7))
- async def get_tiktok_user_profile_videos(self, tiktok_video_url: str, tikhub_token: str) -> Union[dict, None]:
+ async def get_tiktok_user_profile_videos(
+ self, tiktok_video_url: str, tikhub_token: str
+ ) -> Union[dict, None]:
try:
api_url = f"https://api.tikhub.io/tiktok_profile_videos/?tiktok_video_url={tiktok_video_url}&cursor=0&count=20"
_headers = {"Authorization": f"Bearer {tikhub_token}"}
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url, headers=_headers, proxy=self.proxies, timeout=10
+ ) as response:
response = await response.json()
return response
except Exception as e:
@@ -434,12 +489,16 @@ class Scraper:
raise e
@retry(stop=stop_after_attempt(4), wait=wait_fixed(7))
- async def get_tiktok_user_profile_liked_videos(self, tiktok_video_url: str, tikhub_token: str) -> Union[dict, None]:
+ async def get_tiktok_user_profile_liked_videos(
+ self, tiktok_video_url: str, tikhub_token: str
+ ) -> Union[dict, None]:
try:
api_url = f"https://api.tikhub.io/tiktok_profile_liked_videos/?tiktok_video_url={tiktok_video_url}&cursor=0&count=20"
_headers = {"Authorization": f"Bearer {tikhub_token}"}
async with aiohttp.ClientSession() as session:
- async with session.get(api_url, headers=_headers, proxy=self.proxies, timeout=10) as response:
+ async with session.get(
+ api_url, headers=_headers, proxy=self.proxies, timeout=10
+ ) as response:
response = await response.json()
return response
except Exception as e:
@@ -458,13 +517,21 @@ class Scraper:
print("当前链接平台为:{}".format(url_platform))
# 获取视频ID/Get video ID
print("正在获取视频ID...")
- video_id = await self.get_douyin_video_id(video_url) if url_platform == "douyin" else await self.get_tiktok_video_id(video_url)
+ video_id = (
+ await self.get_douyin_video_id(video_url)
+ if url_platform == "douyin"
+ else await self.get_tiktok_video_id(video_url)
+ )
if video_id:
if not quiet_mode:
print("获取视频ID成功,视频ID为:{}".format(video_id))
# 获取视频数据/Get video data
print("正在获取视频数据...")
- data = await self.get_douyin_video_data(video_id) if url_platform == "douyin" else await self.get_tiktok_video_data(video_id)
+ data = (
+ await self.get_douyin_video_data(video_id)
+ if url_platform == "douyin"
+ else await self.get_tiktok_video_data(video_id)
+ )
if data:
if not quiet_mode:
print("获取视频数据成功,正在判断数据类型...")
@@ -571,7 +638,10 @@ class Scraper:
no_watermark_image_list.append(i["url_list"][0])
watermark_image_list.append(i["download_url_list"][0])
api_data = {
- "image_data": {"no_watermark_image_list": no_watermark_image_list, "watermark_image_list": watermark_image_list}
+ "image_data": {
+ "no_watermark_image_list": no_watermark_image_list,
+ "watermark_image_list": watermark_image_list,
+ }
}
# TikTok数据处理/TikTok data processing
elif url_platform == "tiktok":
@@ -585,8 +655,12 @@ class Scraper:
"video_data": {
"wm_video_url": wm_video,
"wm_video_url_HQ": wm_video,
- "nwm_video_url": data["video"]["play_addr"]["url_list"][0],
- "nwm_video_url_HQ": data["video"]["bit_rate"][0]["play_addr"]["url_list"][0],
+ "nwm_video_url": data["video"]["play_addr"][
+ "url_list"
+ ][0],
+ "nwm_video_url_HQ": data["video"]["bit_rate"][0][
+ "play_addr"
+ ]["url_list"][0],
}
}
# TikTok图片数据处理/TikTok image data processing
@@ -598,25 +672,38 @@ class Scraper:
# 有水印图片列表/With watermark image list
watermark_image_list = []
for i in data["image_post_info"]["images"]:
- no_watermark_image_list.append(i["display_image"]["url_list"][0])
- watermark_image_list.append(i["owner_watermark_image"]["url_list"][0])
+ no_watermark_image_list.append(
+ i["display_image"]["url_list"][0]
+ )
+ watermark_image_list.append(
+ i["owner_watermark_image"]["url_list"][0]
+ )
api_data = {
- "image_data": {"no_watermark_image_list": no_watermark_image_list, "watermark_image_list": watermark_image_list}
+ "image_data": {
+ "no_watermark_image_list": no_watermark_image_list,
+ "watermark_image_list": watermark_image_list,
+ }
}
# 更新数据/Update data
result_data.update(api_data)
# print("数据处理完成,最终数据: \n{}".format(result_data))
# 返回数据/Return data
return result_data
- except Exception as e:
+ except Exception:
if not quiet_mode:
traceback.print_exc()
print("数据处理失败!")
- return {"status": "failed", "message": "数据处理失败!/Data processing failed!"}
+ return {
+ "status": "failed",
+ "message": "数据处理失败!/Data processing failed!",
+ }
else:
if not quiet_mode:
print("[抖音|TikTok方法]返回数据为空,无法处理!")
- return {"status": "failed", "message": "返回数据为空,无法处理!/Return data is empty and cannot be processed!"}
+ return {
+ "status": "failed",
+ "message": "返回数据为空,无法处理!/Return data is empty and cannot be processed!",
+ }
else:
if not quiet_mode:
print("获取视频ID失败!")
@@ -633,12 +720,24 @@ class Scraper:
"platform": data.get("platform"),
"type": data.get("type"),
"desc": data.get("desc"),
- "wm_video_url": data["video_data"]["wm_video_url"] if data["type"] == "video" else None,
- "wm_video_url_HQ": data["video_data"]["wm_video_url_HQ"] if data["type"] == "video" else None,
- "nwm_video_url": data["video_data"]["nwm_video_url"] if data["type"] == "video" else None,
- "nwm_video_url_HQ": data["video_data"]["nwm_video_url_HQ"] if data["type"] == "video" else None,
- "no_watermark_image_list": data["image_data"]["no_watermark_image_list"] if data["type"] == "image" else None,
- "watermark_image_list": data["image_data"]["watermark_image_list"] if data["type"] == "image" else None,
+ "wm_video_url": data["video_data"]["wm_video_url"]
+ if data["type"] == "video"
+ else None,
+ "wm_video_url_HQ": data["video_data"]["wm_video_url_HQ"]
+ if data["type"] == "video"
+ else None,
+ "nwm_video_url": data["video_data"]["nwm_video_url"]
+ if data["type"] == "video"
+ else None,
+ "nwm_video_url_HQ": data["video_data"]["nwm_video_url_HQ"]
+ if data["type"] == "video"
+ else None,
+ "no_watermark_image_list": data["image_data"]["no_watermark_image_list"]
+ if data["type"] == "image"
+ else None,
+ "watermark_image_list": data["image_data"]["watermark_image_list"]
+ if data["type"] == "image"
+ else None,
}
return result
else:
@@ -679,4 +778,4 @@ if __name__ == "__main__":
# api.generate_x_bogus(params)
douyin_url = "https://v.douyin.com/rLyrQxA/6.66"
tiktok_url = "https://vt.tiktok.com/ZSRwWXtdr/"
- asyncio.run(async_test(_douyin_url=douyin_url, _tiktok_url=tiktok_url))
\ No newline at end of file
+ asyncio.run(async_test(_douyin_url=douyin_url, _tiktok_url=tiktok_url))
diff --git a/app/core/message.py b/app/core/message.py
index b83822a..86eec77 100755
--- a/app/core/message.py
+++ b/app/core/message.py
@@ -12,8 +12,42 @@ class Message(MSG):
super().__dict__.update(message.__dict__)
@cached_property
- def text_list(self):
- return self.text.split()
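+    # First word of the message with the trigger stripped; None unless it is a registered command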
+ def cmd(self):
+ raw_cmd = self.text_list[0]
+ cmd = raw_cmd.lstrip(Config.TRIGGER)
+ return cmd if cmd in Config.CMD_DICT else None
+
+ @cached_property
+ def flags(self):
+ return [i for i in self.text_list if i.startswith("-")]
+
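+    # Input text with flag tokens (words starting with "-") removed from every line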
+ @cached_property
+ def flt_input(self):
+ split_lines = self.input.splitlines()
+ split_n_joined = [
+ " ".join([word for word in line.split(" ") if word not in self.flags])
+ for line in split_lines
+ ]
+ return "\n".join(split_n_joined)
+
+ @cached_property
+ def input(self):
+ if len(self.text_list) > 1:
+ return self.text.split(maxsplit=1)[-1]
+ return ""
+
+ @cached_property
+ def replied(self):
+ if self.reply_to_message:
+ return Message.parse_message(self.reply_to_message)
+
+ @cached_property
+ def reply_id(self):
+ return self.replied.id if self.replied else None
+
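+    # task_id of the replied-to message, used to look up its running task for cancellation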
+ @cached_property
+ def replied_task_id(self):
+ return self.replied.task_id if self.replied else None
@cached_property
def reply_text_list(self):
@@ -27,47 +61,30 @@ class Message(MSG):
return reply_text_list
@cached_property
- def cmd(self):
- raw_cmd = self.text_list[0]
- cmd = raw_cmd.lstrip(Config.TRIGGER)
- return cmd if cmd in Config.CMD_DICT else None
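+    # Unique "<chat_id>-<message_id>" string used to name asyncio tasks spawned for this message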
+ def task_id(self):
+ return f"{self.chat.id}-{self.id}"
@cached_property
- def flags(self):
- return [i for i in self.text_list if i.startswith("-")]
+ def text_list(self):
+ return self.text.split()
- @cached_property
- def input(self):
- if len(self.text_list) > 1:
- return self.text.split(maxsplit=1)[-1]
- return ""
-
- @cached_property
- def flt_input(self):
- split_lines = self.input.splitlines()
- split_n_joined = [
- " ".join([word for word in line.split(" ") if word not in self.flags])
- for line in split_lines
- ]
- return "\n".join(split_n_joined)
-
- @cached_property
- def replied(self):
- if self.reply_to_message:
- return Message.parse_message(self.reply_to_message)
-
- @cached_property
- def reply_id(self):
- return self.replied.id if self.replied else None
-
- async def reply(self, text, del_in: int = 0, block=True, **kwargs):
- task = self._client.send_message(
- chat_id=self.chat.id, text=text, reply_to_message_id=self.id, **kwargs
- )
- if del_in:
- await self.async_deleter(task=task, del_in=del_in, block=block)
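+    # Await the send task, sleep for del_in seconds, then delete the result; non-blocking mode schedules this in the background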
+ async def async_deleter(self, del_in, task, block):
+ if block:
+ x = await task
+ await asyncio.sleep(del_in)
+ await x.delete()
else:
- return await task
+ asyncio.create_task(
+ self.async_deleter(del_in=del_in, task=task, block=True)
+ )
+
+ async def delete(self, reply=False):
+ try:
+ await super().delete()
+ if reply and self.replied:
+ await self.replied.delete()
+ except MessageDeleteForbidden:
+ pass
async def edit(self, text, del_in: int = 0, block=True, **kwargs):
if len(str(text)) < 4096:
@@ -83,23 +100,14 @@ class Message(MSG):
)
return reply
- async def delete(self, reply=False):
- try:
- await super().delete()
- if reply and self.replied:
- await self.replied.delete()
- except MessageDeleteForbidden:
- pass
-
- async def async_deleter(self, del_in, task, block):
- if block:
- x = await task
- await asyncio.sleep(del_in)
- await x.delete()
+ async def reply(self, text, del_in: int = 0, block=True, **kwargs):
+ task = self._client.send_message(
+ chat_id=self.chat.id, text=text, reply_to_message_id=self.id, **kwargs
+ )
+ if del_in:
+ await self.async_deleter(task=task, del_in=del_in, block=block)
else:
- asyncio.create_task(
- self.async_deleter(del_in=del_in, task=task, block=True)
- )
+ return await task
@classmethod
def parse_message(cls, message):
diff --git a/app/core/shell.py b/app/core/shell.py
index c543caa..fb0cb12 100755
--- a/app/core/shell.py
+++ b/app/core/shell.py
@@ -27,7 +27,6 @@ async def run_shell_cmd(cmd):
class AsyncShell:
-
def __init__(self, process):
self.process = process
self.full_std = ""
@@ -46,6 +45,11 @@ class AsyncShell:
while not self.is_done:
yield self.full_std
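+    # Kill the subprocess and cancel the background reader task if still running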
+ def cancel(self):
+ if not self.is_done:
+ self.process.kill()
+ self._task.cancel()
+
@classmethod
async def run_cmd(cls, cmd, name="shell"):
sub_process = cls(
@@ -53,6 +57,8 @@ class AsyncShell:
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT
)
)
- asyncio.create_task(sub_process.read_output())
+ sub_process._task = asyncio.create_task(
+ sub_process.read_output(), name="AsyncShell"
+ )
await asyncio.sleep(0.5)
return sub_process
diff --git a/app/plugins/dev_tools.py b/app/plugins/dev_tools.py
index 85accd6..edd6b1d 100644
--- a/app/plugins/dev_tools.py
+++ b/app/plugins/dev_tools.py
@@ -17,9 +17,7 @@ async def run_cmd(bot, message):
cmd = message.input.strip()
reply = await message.reply("executing...")
try:
- proc_stdout = await asyncio.Task(
- shell.run_shell_cmd(cmd), name=f"{message.chat.id}-{reply.id}"
- )
+ proc_stdout = await asyncio.Task(shell.run_shell_cmd(cmd), name=reply.task_id)
except asyncio.exceptions.CancelledError:
return await reply.edit("`Cancelled...`")
output = f"`${cmd}`\n\n`{proc_stdout}`"
@@ -28,12 +26,12 @@ async def run_cmd(bot, message):
# Shell but Live Output
async def live_shell(bot, message):
+ cmd = message.input.strip()
+ reply = await message.reply("`getting live output....`")
+ sub_process = await shell.AsyncShell.run_cmd(cmd)
+ sleep_for = 1
+ output = ""
try:
- cmd = message.input.strip()
- reply = await message.reply("`getting live output....`")
- sub_process = await shell.AsyncShell.run_cmd(cmd)
- sleep_for = 1
- output = ""
async for stdout in sub_process.get_output():
if output != stdout:
if len(stdout) <= 4096:
@@ -43,11 +41,9 @@ async def live_shell(bot, message):
parse_mode=ParseMode.MARKDOWN,
)
output = stdout
- if sleep_for >= 5:
+ if sleep_for >= 6:
sleep_for = 1
- await asyncio.Task(
- asyncio.sleep(sleep_for), name=f"{message.chat.id}-{reply.id}"
- )
+ await asyncio.Task(asyncio.sleep(sleep_for), name=reply.task_id)
sleep_for += 1
return await reply.edit(
f"`$ {cmd}\n\n``{sub_process.full_std}`",
@@ -55,7 +51,8 @@ async def live_shell(bot, message):
disable_web_page_preview=True,
)
except asyncio.exceptions.CancelledError:
- return await reply.edit("`Cancelled...`")
+ sub_process.cancel()
+        return await reply.edit("`Cancelled....`")
# Run Python code
@@ -74,7 +71,7 @@ async def executor(bot, message):
# Create and initialise the function
exec(f"async def _exec(bot, message):\n {formatted_code}")
func_out = await asyncio.Task(
- locals()["_exec"](bot, message), name=f"{message.chat.id}-{reply.id}"
+ locals()["_exec"](bot, message), name=reply.task_id
)
except asyncio.exceptions.CancelledError:
return await reply.edit("`Cancelled....`")
@@ -82,9 +79,11 @@ async def executor(bot, message):
func_out = str(traceback.format_exc())
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
- output = f"`{codeOut.getvalue().strip() or codeErr.getvalue().strip() or func_out}`"
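+    # Prefer captured stderr, fall back to stdout, then append the coroutine's return value if any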
+ output = codeErr.getvalue().strip() or codeOut.getvalue().strip()
+ if func_out is not None:
+ output = "\n\n".join([output, str(func_out)]).strip()
if "-s" not in message.flags:
- output = f"> `{code}`\n\n>> {output}"
+ output = f"> `{code}`\n\n>> `{output}`"
return await reply.edit(
output,
name="exec.txt",
diff --git a/app/plugins/tools.py b/app/plugins/tools.py
index b337940..83d17d4 100644
--- a/app/plugins/tools.py
+++ b/app/plugins/tools.py
@@ -5,13 +5,13 @@ from app import bot
@bot.add_cmd(cmd=["cancel", "c"])
async def cancel_task(bot, message):
- task_id = message.reply_id
+ task_id = message.replied_task_id
if not task_id:
return await message.reply(
"Reply To a Command or Bot's Response Message.", del_in=8
)
all_tasks = asyncio.all_tasks()
- tasks = [x for x in all_tasks if x.get_name() == f"{message.chat.id}-{task_id}"]
+ tasks = [x for x in all_tasks if x.get_name() == task_id]
if not tasks:
return await message.reply("Task not in Currently Running Tasks.", del_in=8)
response = ""
diff --git a/app/social_dl.py b/app/social_dl.py
index 52e364f..abcab35 100644
--- a/app/social_dl.py
+++ b/app/social_dl.py
@@ -13,7 +13,7 @@ async def dl(bot, message):
chat_id=message.chat.id, text="`trying to download...`"
)
coro = ExtractAndSendMedia.process(message)
- task = asyncio.Task(coro, name=f"{message.chat.id}-{message.id}")
+ task = asyncio.Task(coro, name=message.task_id)
media = await task
if media.exceptions:
exceptions = "\n".join(media.exceptions)
@@ -32,11 +32,11 @@ async def dl(bot, message):
@bot.on_message(filters.user_filter)
@bot.on_edited_message(filters.user_filter)
async def cmd_dispatcher(bot, message):
- parsed_message = Message.parse_message(message)
- func = Config.CMD_DICT[parsed_message.cmd]
- coro = func(bot, parsed_message)
+ message = Message.parse_message(message)
+ func = Config.CMD_DICT[message.cmd]
+ coro = func(bot, message)
try:
- task = asyncio.Task(coro, name=f"{message.chat.id}-{message.id}")
+ task = asyncio.Task(coro, name=message.task_id)
await task
except asyncio.exceptions.CancelledError:
await bot.log(text=f"#Cancelled:\n{message.text}
")
@@ -51,10 +51,10 @@ async def cmd_dispatcher(bot, message):
@bot.on_message(filters.chat_filter)
async def dl_dispatcher(bot, message):
- parsed_message = Message.parse_message(message)
- coro = dl(bot, parsed_message)
+ message = Message.parse_message(message)
+ coro = dl(bot, message)
try:
- task = asyncio.Task(coro, name=f"{message.chat.id}-{message.id}")
+ task = asyncio.Task(coro, name=message.task_id)
await task
except asyncio.exceptions.CancelledError:
await bot.log(text=f"#Cancelled:\n{message.text}
")