Mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-05 02:55:25 +00:00)

Commit 16b1e10cf6 (parent 30b5250c9c)
Add bytes kb mb to bandwidth, Dynamic config, Dynamic upload domain, Separate thread.
@@ -21,6 +21,10 @@ from Src.Util.console import console
from Src.Util._jsonConfig import config_manager


# Variable
AUTO_UPDATE_DOMAIN = config_manager.get_bool('DEFAULT', 'auto_update_domain')


def check_url_for_content(url: str, content: str, timeout: int = 1) -> bool:
"""
@@ -57,6 +61,7 @@ def check_url_for_content(url: str, content: str, timeout: int = 1) -> bool:
return False


def get_top_level_domain(base_url: str, target_content: str, max_workers: int = os.cpu_count(), timeout: int = 2, retries: int = 1) -> str:
"""
Get the top-level domain (TLD) from a list of URLs.
@@ -156,8 +161,10 @@ def search_domain(site_name: str, target_content: str, base_url: str):
tuple: The found domain and the complete URL.
"""

# Extract config domain
domain = config_manager.get("SITE", site_name)
domain = str(config_manager.get_dict("SITE", site_name)['domain'])
console.print(f"[cyan]Test site[white]: [red]{base_url}.{domain}")

try:
@@ -170,23 +177,31 @@ def search_domain(site_name: str, target_content: str, base_url: str):
console.print(f"[cyan]Use domain: [red]{domain}")
return domain, f"{base_url}.{domain}"

except:
except Exception as e:

# If the current domain fails, find a new one
console.print(f"[cyan]Error test response site[white]: [red]{e}")
print()
console.print("[red]Extract new DOMAIN from TLD list.")
new_domain = get_top_level_domain(base_url=base_url, target_content=target_content)

if new_domain is not None:
if AUTO_UPDATE_DOMAIN:
console.print("[red]Extract new DOMAIN from TLD list.")
new_domain = get_top_level_domain(base_url=base_url, target_content=target_content)

# Update domain in config.json
config_manager.set_key('SITE', site_name, new_domain)
config_manager.write_config()
if new_domain is not None:

# Update domain in config.json
config_manager.config['SITE'][site_name]['domain'] = new_domain
config_manager.write_config()

# Return new config domain
console.print(f"[cyan]Use domain: [red]{new_domain}")
return new_domain, f"{base_url}.{new_domain}"

else:
logging.error(f"Failed to find a new domain for: {base_url}")
sys.exit(0)

# Return new config domain
console.print(f"[cyan]Use domain: [red]{new_domain}")
return new_domain, f"{base_url}.{new_domain}"

else:
logging.error(f"Failed to find a new domain for: {base_url}")
logging.error(f"Update domain manually in config.json")
sys.exit(0)
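For reference, the per-site entry that search_domain now reads is a nested object instead of a bare domain string (see the config.json hunk at the bottom of this diff). A minimal sketch of the read/update round trip, assuming config_manager behaves exactly as it is used above; the site key and the discovered TLD are placeholder values:

from Src.Util._jsonConfig import config_manager

site_name = "streamingcommunity"   # example site key from config.json

# Read the current domain from the nested SITE entry
site_cfg = config_manager.get_dict("SITE", site_name)
domain = str(site_cfg['domain'])   # e.g. "boston"

# After get_top_level_domain() finds a working TLD, persist it back
new_domain = "example"             # placeholder for the discovered TLD
config_manager.config['SITE'][site_name]['domain'] = new_domain
config_manager.write_config()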
@@ -21,7 +21,7 @@ from .Core.Class.SearchType import MediaManager


# Variable
from .costant import SITE_NAME, DOMAIN_NOW
from .costant import SITE_NAME
media_search_manager = MediaManager()
table_show_manager = TVShowManager()
@@ -3,12 +3,6 @@
from typing import Dict, Any, List


# Variable
from ...costant import SITE_NAME, DOMAIN_NOW


class Episode:
def __init__(self, data: Dict[str, Any]):
self.id: int = data.get('id', '')
@@ -15,6 +15,7 @@ class Preview:
def __str__(self):
return f"Preview: ID={self.id}, Title ID={self.title_id}, Created At={self.created_at}, Updated At={self.updated_at}, Video ID={self.video_id}, Viewable={self.is_viewable}, Zoom Factor={self.zoom_factor}, Filename={self.filename}, Embed URL={self.embed_url}"


class Genre:
def __init__(self, data):
self.id = data.get("id")
@@ -28,19 +29,6 @@ class Genre:
def __str__(self):
return f"Genre: ID={self.id}, Name={self.name}, Type={self.type}, Hidden={self.hidden}, Created At={self.created_at}, Updated At={self.updated_at}, Pivot={self.pivot}"

class Image:
def __init__(self, data):
self.id = data.get("id")
self.filename = data.get("filename")
self.type = data.get("type")
self.imageable_type = data.get("imageable_type")
self.imageable_id = data.get("imageable_id")
self.created_at = data.get("created_at")
self.updated_at = data.get("updated_at")
self.original_url_field = data.get("original_url_field")

def __str__(self):
return f"Image: ID={self.id}, Filename={self.filename}, Type={self.type}, Imageable Type={self.imageable_type}, Imageable ID={self.imageable_id}, Created At={self.created_at}, Updated At={self.updated_at}, Original URL Field={self.original_url_field}"

class PreviewManager:
def __init__(self, json_data):
@@ -53,11 +41,7 @@ class PreviewManager:
self.seasons_count = json_data.get("seasons_count")
self.genres = [Genre(genre_data) for genre_data in json_data.get("genres", [])]
self.preview = Preview(json_data.get("preview"))
self.images = [Image(image_data) for image_data in json_data.get("images", [])]

def __str__(self):
genres_str = "\n".join(str(genre) for genre in self.genres)
images_str = "\n".join(str(image) for image in self.images)
return f"Title: ID={self.id}, Type={self.type}, Runtime={self.runtime}, Release Date={self.release_date}, Quality={self.quality}, Plot={self.plot}, Seasons Count={self.seasons_count}\nGenres:\n{genres_str}\nPreview:\n{self.preview}\nImages:\n{images_str}"

return f"Title: ID={self.id}, Type={self.type}, Runtime={self.runtime}, Release Date={self.release_date}, Quality={self.quality}, Plot={self.plot}, Seasons Count={self.seasons_count}\nGenres:\n{genres_str}\nPreview:\n{self.preview}\n."
@@ -3,24 +3,6 @@
from typing import List


# Variable
from ...costant import SITE_NAME, DOMAIN_NOW


class Image:
def __init__(self, data: dict):
self.imageable_id: int = data.get('imageable_id')
self.imageable_type: str = data.get('imageable_type')
self.filename: str = data.get('filename')
self.type: str = data.get('type')
self.original_url_field: str = data.get('original_url_field')
self.url: str = f"https://cdn.{SITE_NAME}.{DOMAIN_NOW}/images/{self.filename}"

def __str__(self):
return f"Image(imageable_id={self.imageable_id}, imageable_type='{self.imageable_type}', filename='{self.filename}', type='{self.type}', url='{self.url}')"


class MediaItem:
def __init__(self, data: dict):
self.id: int = data.get('id')
@@ -31,10 +13,9 @@ class MediaItem:
self.sub_ita: int = data.get('sub_ita')
self.last_air_date: str = data.get('last_air_date')
self.seasons_count: int = data.get('seasons_count')
self.images: List[Image] = [Image(image_data) for image_data in data.get('images', [])]

def __str__(self):
return f"MediaItem(id={self.id}, slug='{self.slug}', name='{self.name}', type='{self.type}', score='{self.score}', sub_ita={self.sub_ita}, last_air_date='{self.last_air_date}', seasons_count={self.seasons_count}, images={self.images})"
return f"MediaItem(id={self.id}, slug='{self.slug}', name='{self.name}', type='{self.type}', score='{self.score}', sub_ita={self.sub_ita}, last_air_date='{self.last_air_date}', seasons_count={self.seasons_count})"


class MediaManager:
@@ -82,4 +63,3 @@ class MediaManager:

def __str__(self):
return f"MediaManager(num_media={len(self.media_list)})"
@@ -26,6 +26,7 @@ class WindowVideo:
def __str__(self):
return f"WindowVideo(id={self.id}, name='{self.name}', filename='{self.filename}', size='{self.size}', quality='{self.quality}', duration='{self.duration}', views={self.views}, is_viewable={self.is_viewable}, status='{self.status}', fps={self.fps}, legacy={self.legacy}, folder_id={self.folder_id}, created_at_diff='{self.created_at_diff}')"


class WindowParameter:
def __init__(self, data: Dict[str, Any]):
self.data = data
@@ -20,7 +20,7 @@ from .Core.Class.SearchType import MediaManager


# Variable
from .costant import SITE_NAME, DOMAIN_NOW
from .costant import SITE_NAME
cookie_index = config_manager.get_dict('REQUESTS', 'index')
media_search_manager = MediaManager()
table_show_manager = TVShowManager()
@@ -33,8 +33,11 @@ def title_search(word_to_search) -> int:
"""
try:

# Find a new domain if the previous one doesn't work
domain_to_use, _ = search_domain(SITE_NAME, '<meta property="og:site_name" content="DDLstreamitaly', f"https://{SITE_NAME}")

# Send request to search for titles
response = httpx.get(f"https://{SITE_NAME}.{DOMAIN_NOW}/search/?&q={word_to_search}&quick=1&type=videobox_video&nodes=11", headers={'user-agent': get_headers()})
response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/search/?&q={word_to_search}&quick=1&type=videobox_video&nodes=11", headers={'user-agent': get_headers()})
response.raise_for_status()

# Create soup and find table
@@ -19,7 +19,7 @@ from .Core.Class.SearchType import MediaManager


# Variable
from .costant import DOMAIN_NOW
from .costant import SITE_NAME
media_search_manager = MediaManager()
table_show_manager = TVShowManager()

@@ -29,8 +29,11 @@ def title_search(word_to_search) -> int:
Search for titles based on a search query.
"""

# Find a new domain if the previous one doesn't work
domain_to_use, _ = search_domain(SITE_NAME, '<meta name="generator" content="Guardaserie Streaming', f"https://{SITE_NAME}")

# Send request to search for titles
response = httpx.get(f"https://guardaserie.{DOMAIN_NOW}/?story={word_to_search}&do=search&subaction=search", headers={'user-agent': get_headers()})
response = httpx.get(f"https://guardaserie.{domain_to_use}/?story={word_to_search}&do=search&subaction=search", headers={'user-agent': get_headers()})
response.raise_for_status()

# Create soup and find table
@@ -3,10 +3,6 @@
from typing import Dict, Any, List


# Variable
from ...costant import SITE_NAME, DOMAIN_NOW


class Episode:
def __init__(self, data: Dict[str, Any]):
self.id: int = data.get('id', '')
@@ -15,6 +15,7 @@ class Preview:
def __str__(self):
return f"Preview: ID={self.id}, Title ID={self.title_id}, Created At={self.created_at}, Updated At={self.updated_at}, Video ID={self.video_id}, Viewable={self.is_viewable}, Zoom Factor={self.zoom_factor}, Filename={self.filename}, Embed URL={self.embed_url}"


class Genre:
def __init__(self, data):
self.id = data.get("id")
@@ -28,20 +29,6 @@ class Genre:
def __str__(self):
return f"Genre: ID={self.id}, Name={self.name}, Type={self.type}, Hidden={self.hidden}, Created At={self.created_at}, Updated At={self.updated_at}, Pivot={self.pivot}"

class Image:
def __init__(self, data):
self.id = data.get("id")
self.filename = data.get("filename")
self.type = data.get("type")
self.imageable_type = data.get("imageable_type")
self.imageable_id = data.get("imageable_id")
self.created_at = data.get("created_at")
self.updated_at = data.get("updated_at")
self.original_url_field = data.get("original_url_field")

def __str__(self):
return f"Image: ID={self.id}, Filename={self.filename}, Type={self.type}, Imageable Type={self.imageable_type}, Imageable ID={self.imageable_id}, Created At={self.created_at}, Updated At={self.updated_at}, Original URL Field={self.original_url_field}"

class PreviewManager:
def __init__(self, json_data):
self.id = json_data.get("id")
@@ -53,11 +40,9 @@ class PreviewManager:
self.seasons_count = json_data.get("seasons_count")
self.genres = [Genre(genre_data) for genre_data in json_data.get("genres", [])]
self.preview = Preview(json_data.get("preview"))
self.images = [Image(image_data) for image_data in json_data.get("images", [])]

def __str__(self):
genres_str = "\n".join(str(genre) for genre in self.genres)
images_str = "\n".join(str(image) for image in self.images)
return f"Title: ID={self.id}, Type={self.type}, Runtime={self.runtime}, Release Date={self.release_date}, Quality={self.quality}, Plot={self.plot}, Seasons Count={self.seasons_count}\nGenres:\n{genres_str}\nPreview:\n{self.preview}\nImages:\n{images_str}"
return f"Title: ID={self.id}, Type={self.type}, Runtime={self.runtime}, Release Date={self.release_date}, Quality={self.quality}, Plot={self.plot}, Seasons Count={self.seasons_count}\nGenres:\n{genres_str}\nPreview:\n{self.preview}\n."
@@ -3,11 +3,6 @@
from typing import List


# Variable
from ...costant import SITE_NAME, DOMAIN_NOW


class MediaItem:
def __init__(self, data: dict):
self.id: int = data.get('id')
@@ -68,4 +63,3 @@ class MediaManager:

def __str__(self):
return f"MediaManager(num_media={len(self.media_list)})"
@@ -26,6 +26,7 @@ class WindowVideo:
def __str__(self):
return f"WindowVideo(id={self.id}, name='{self.name}', filename='{self.filename}', size='{self.size}', quality='{self.quality}', duration='{self.duration}', views={self.views}, is_viewable={self.is_viewable}, status='{self.status}', fps={self.fps}, legacy={self.legacy}, folder_id={self.folder_id}, created_at_diff='{self.created_at_diff}')"


class WindowParameter:
def __init__(self, data: Dict[str, Any]):
self.data = data
Src/Api/tv.py  (140 lines removed)
@@ -1,140 +0,0 @@
# 3.12.23 -> 10.12.23

# Class import
from Src.Util.Helper.headers import get_headers
from Src.Util.Helper.util import convert_utf8_name
from Src.Util.Helper.console import console, msg
from Src.Util.m3u8 import dw_m3u8

# General import
import requests, os, re, json, sys
from bs4 import BeautifulSoup

# [func]
def get_token(id_tv, domain):
    session = requests.Session()
    session.get(f"https://streamingcommunity.{domain}/watch/{id_tv}")
    return session.cookies['XSRF-TOKEN']

def get_info_tv(id_film, title_name, site_version, domain):
    req = requests.get(f"https://streamingcommunity.{domain}/titles/{id_film}-{title_name}", headers={
        'X-Inertia': 'true',
        'X-Inertia-Version': site_version,
        'User-Agent': get_headers()
    })

    if req.ok():
        return req.json()['props']['title']['seasons_count']
    else:
        console.log(f"[red]Error: {req.status_code}")
        sys.exit(0)

def get_info_season(tv_id, tv_name, domain, version, token, n_stagione):
    req = requests.get(f'https://streamingcommunity.{domain}/titles/{tv_id}-{tv_name}/stagione-{n_stagione}', headers={
        'authority': f'streamingcommunity.{domain}', 'referer': f'https://streamingcommunity.broker/titles/{tv_id}-{tv_name}',
        'user-agent': get_headers(), 'x-inertia': 'true', 'x-inertia-version': version, 'x-xsrf-token': token,
    })

    if req.ok:
        return [{'id': ep['id'], 'n': ep['number'], 'name': ep['name']} for ep in req.json()['props']['loadedSeason']['episodes']]
    else:
        console.log(f"[red]Error: {req.status_code}")
        sys.exit(0)

def get_iframe(tv_id, ep_id, domain, token):
    req = requests.get(f'https://streamingcommunity.{domain}/iframe/{tv_id}', params={'episode_id': ep_id, 'next_episode': '1'}, cookies={'XSRF-TOKEN': token}, headers={
        'referer': f'https://streamingcommunity.{domain}/watch/{tv_id}?e={ep_id}',
        'user-agent': get_headers()
    })

    if req.ok:
        url_embed = BeautifulSoup(req.text, "lxml").find("iframe").get("src")
        req_embed = requests.get(url_embed, headers = {"User-agent": get_headers()}).text
        return BeautifulSoup(req_embed, "lxml").find("body").find("script").text
    else:
        console.log(f"[red]Error: {req.status_code}")
        sys.exit(0)

def parse_content(embed_content):

    # Parse parameter from req embed content
    win_video = re.search(r"window.video = {.*}", str(embed_content)).group()
    win_param = re.search(r"params: {[\s\S]*}", str(embed_content)).group()

    # Parse parameter to make read for json
    json_win_video = "{"+win_video.split("{")[1].split("}")[0]+"}"
    json_win_param = "{"+win_param.split("{")[1].split("}")[0].replace("\n", "").replace(" ", "") + "}"
    json_win_param = json_win_param.replace(",}", "}").replace("'", '"')
    return json.loads(json_win_video), json.loads(json_win_param)

def get_m3u8_url(json_win_video, json_win_param):
    return f"https://vixcloud.co/playlist/{json_win_video['id']}?type=video&rendition=720p&token={json_win_param['token720p']}&expires={json_win_param['expires']}"

def get_m3u8_key_ep(json_win_video, json_win_param, tv_name, n_stagione, n_ep, ep_title):
    req = requests.get('https://vixcloud.co/storage/enc.key', headers={
        'referer': f'https://vixcloud.co/embed/{json_win_video["id"]}?token={json_win_param["token720p"]}&title={tv_name.replace("-", "+")}&referer=1&expires={json_win_param["expires"]}&description=S{n_stagione}%3AE{n_ep}+{ep_title.replace(" ", "+")}&nextEpisode=1',
    })

    if req.ok:
        return "".join(["{:02x}".format(c) for c in req.content])
    else:
        console.log(f"[red]Error: {req.status_code}")
        sys.exit(0)

def get_m3u8_audio(json_win_video, json_win_param, tv_name, n_stagione, n_ep, ep_title):

    req = requests.get(f'https://vixcloud.co/playlist/{json_win_video["id"]}', params={'token': json_win_param['token'], 'expires': json_win_param["expires"] }, headers={
        'referer': f'https://vixcloud.co/embed/{json_win_video["id"]}?token={json_win_param["token720p"]}&title={tv_name.replace("-", "+")}&referer=1&expires={json_win_param["expires"]}&description=S{n_stagione}%3AE{n_ep}+{ep_title.replace(" ", "+")}&nextEpisode=1'
    })

    if req.ok:
        m3u8_cont = req.text.split()
        for row in m3u8_cont:
            if "audio" in str(row) and "ita" in str(row):
                return row.split(",")[-1].split('"')[-2]
    else:
        console.log(f"[red]Error: {req.status_code}")
        sys.exit(0)


def actually_dw(tv_id, eps, index_ep_select, domain, token, tv_name, season_select, lower_tv_name):
    embed_content = get_iframe(tv_id, eps[index_ep_select]['id'], domain, token)
    json_win_video, json_win_param = parse_content(embed_content)
    m3u8_url = get_m3u8_url(json_win_video, json_win_param)
    m3u8_key = get_m3u8_key_ep(json_win_video, json_win_param, tv_name, season_select, index_ep_select+1, eps[index_ep_select]['name'])

    mp4_name = f"{lower_tv_name.replace('+', '_')}_{str(season_select)}_{str(index_ep_select+1)}"
    mp4_format = mp4_name + ".mp4"
    mp4_path = os.path.join("videos", mp4_format)

    m3u8_url_audio = get_m3u8_audio(json_win_video, json_win_param, tv_name, season_select, index_ep_select+1, eps[index_ep_select]['name'])

    if m3u8_url_audio != None:
        console.print("[red]=> Use m3u8 audio")

    dw_m3u8(m3u8_url, m3u8_url_audio, m3u8_key, mp4_path)


def main_dw_tv(tv_id, tv_name, version, domain):

    token = get_token(tv_id, domain)

    lower_tv_name = str(tv_name).lower()
    tv_name = convert_utf8_name(lower_tv_name)  # ERROR LATIN 1 IN REQ WITH ò à ù ...
    console.print(f"[blue]Season find: [red]{get_info_tv(tv_id, tv_name, version, domain)}")
    season_select = msg.ask("\n[green]Insert season number: ")

    eps = get_info_season(tv_id, tv_name, domain, version, token, season_select)
    for ep in eps:
        console.print(f"[green]Ep: [blue]{ep['n']} [green]=> [purple]{ep['name']}")
    index_ep_select = msg.ask("\n[green]Insert ep number (use * for all episodes): ")

    if(index_ep_select == '*'):
        for ep in eps:
            index_ep_select = int(ep['n']) - 1
            actually_dw(tv_id, eps, index_ep_select, domain, token, tv_name, season_select, lower_tv_name)
        return

    index_ep_select = int(index_ep_select) - 1
    actually_dw(tv_id, eps, index_ep_select, domain, token, tv_name, season_select, lower_tv_name)
@@ -3,7 +3,6 @@
import os
import sys
import logging
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor


@@ -12,6 +11,8 @@ import httpx
from unidecode import unidecode


# Internal utilities
from Src.Util.headers import get_headers
from Src.Util._jsonConfig import config_manager
@@ -4,9 +4,9 @@ import os
import sys
import time
import queue
import threading
import logging
import binascii
import threading
from queue import PriorityQueue
from urllib.parse import urljoin, urlparse
from concurrent.futures import ThreadPoolExecutor
@@ -23,6 +23,7 @@ from Src.Util.headers import get_headers, random_headers
from Src.Util.color import Colors
from Src.Util._jsonConfig import config_manager
from Src.Util.os import check_file_existence
from Src.Util.call_stack import get_call_stack


# Logic class
@@ -41,7 +42,6 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


# Config
TQDM_MAX_WORKER = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers')
TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
@@ -274,6 +274,27 @@ class M3U8_Segments:
Args:
- add_desc (str): Additional description for the progress bar.
"""

# Get the config site name from the previous call in the stack
frames = get_call_stack()
config_site = str(os.path.basename(frames[-1]['folder'])).lower()

# Workers to use for downloading
TQDM_MAX_WORKER = 0

# Select the video and audio worker counts for the site taken from the caller's folder in the stack
VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
if VIDEO_WORKERS == -1: VIDEO_WORKERS = os.cpu_count()
AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
if AUDIO_WORKERS == -1: AUDIO_WORKERS = os.cpu_count()

# Different worker counts for audio and video
if "video" in str(add_desc):
TQDM_MAX_WORKER = VIDEO_WORKERS
if "audio" in str(add_desc):
TQDM_MAX_WORKER = AUDIO_WORKERS

# Custom bar for mobile and pc
if TQDM_USE_LARGE_BAR:
bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
else:
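The worker selection above keys the thread-pool size off the caller's site folder. A minimal standalone sketch of the same lookup, assuming the nested SITE entries shown in config.json below; here the site name is passed in directly instead of being derived from the call stack:

import os
from Src.Util._jsonConfig import config_manager

def resolve_workers(config_site: str, add_desc: str) -> int:
    # -1 in config.json means "use one worker per CPU core"
    site_cfg = config_manager.get_dict('SITE', config_site)
    video_workers = int(site_cfg['video_workers'])
    audio_workers = int(site_cfg['audio_workers'])
    if video_workers == -1:
        video_workers = os.cpu_count()
    if audio_workers == -1:
        audio_workers = os.cpu_count()

    # The progress-bar description decides which pool size applies;
    # anything that is not a video download falls back to the audio pool here.
    return video_workers if "video" in str(add_desc) else audio_workers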
@@ -34,7 +34,7 @@ class M3U8_Ts_Estimator:
self.now_downloaded_size = 0
self.total_segments = total_segments
self.lock = threading.Lock()
self.speeds = deque(maxlen=3)
self.speed = 0
self.speed_thread = threading.Thread(target=self.capture_speed)
self.speed_thread.daemon = True
self.speed_thread.start()
@@ -56,30 +56,44 @@ class M3U8_Ts_Estimator:
self.ts_file_sizes.append(size)
self.now_downloaded_size += size_download

def capture_speed(self, interval: float = 1.0):
def capture_speed(self, interval: float = 0.8):
"""
Capture the internet speed periodically and store the values in a deque.
"""
def get_process_network_io(pid):
process = psutil.Process(pid)
io_counters = process.io_counters()
def get_network_io():
io_counters = psutil.net_io_counters()
return io_counters

def convert_bytes_to_mbps(bytes):
return (bytes * 8) / (1024 * 1024)

def format_bytes(bytes):
if bytes < 1024:
return f"{bytes:.2f} Bytes/s"
elif bytes < 1024 * 1024:
return f"{bytes / 1024:.2f} KB/s"
else:
return f"{bytes / (1024 * 1024):.2f} MB/s"

# Get proc id
pid = os.getpid()

while True:
old_value = get_process_network_io(pid)

# Get value
old_value = get_network_io()
time.sleep(interval)
new_value = get_process_network_io(pid)
bytes_sent = new_value[2] - old_value[2]
bytes_recv = new_value[3] - old_value[3]
mbps_recv = convert_bytes_to_mbps(bytes_recv) / interval
new_value = get_network_io()

with self.lock:
self.speeds.append(mbps_recv)
upload_speed = (new_value.bytes_sent - old_value.bytes_sent) / interval
download_speed = (new_value.bytes_recv - old_value.bytes_recv) / interval

self.speed = ({
"upload": format_bytes(upload_speed),
"download": format_bytes(download_speed)
})

old_value = new_value


def get_average_speed(self) -> float:
"""
@@ -89,9 +103,7 @@ class M3U8_Ts_Estimator:
float: The average internet speed in Mbps.
"""
with self.lock:
if len(self.speeds) == 0:
return 0.0
return sum(self.speeds) / len(self.speeds)
return self.speed['download'].split(" ")

def calculate_total_size(self) -> str:
"""
@@ -148,16 +160,18 @@ class M3U8_Ts_Estimator:
number_file_total_size = file_total_size.split(' ')[0]
units_file_downloaded = downloaded_file_size_str.split(' ')[1]
units_file_total_size = file_total_size.split(' ')[1]
average_internet_speed = self.get_average_speed() / 8 # Mbps -> MB\s

average_internet_speed = self.get_average_speed()[0]
average_internet_unit = self.get_average_speed()[1]

# Update the progress bar's postfix
if TQDM_USE_LARGE_BAR:
progress_counter.set_postfix_str(
f"{Colors.WHITE}[ {Colors.GREEN}{number_file_downloaded} {Colors.WHITE}< {Colors.GREEN}{number_file_total_size} {Colors.RED}{units_file_total_size} "
f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}MB/s"
f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed} {Colors.RED}{average_internet_unit}"
)
else:
progress_counter.set_postfix_str(
f"{Colors.WHITE}[ {Colors.GREEN}{number_file_downloaded}{Colors.RED} {units_file_downloaded} "
f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}Mbps"
f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed} {Colors.RED}{average_internet_unit}"
)
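With this change the progress bar reports whatever unit format_bytes picked (Bytes/s, KB/s or MB/s) instead of a hard-coded MB/s figure. A short standalone sketch of the same thresholds:

def format_bytes(n: float) -> str:
    # Same cutoffs as the downloader: raw bytes below 1024, then KB/s, then MB/s
    if n < 1024:
        return f"{n:.2f} Bytes/s"
    elif n < 1024 * 1024:
        return f"{n / 1024:.2f} KB/s"
    else:
        return f"{n / (1024 * 1024):.2f} MB/s"

print(format_bytes(512))           # 512.00 Bytes/s
print(format_bytes(2048))          # 2.00 KB/s
print(format_bytes(3 * 1024**2))   # 3.00 MB/s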
Src/Util/call_stack.py  (53 lines, new file)
@@ -0,0 +1,53 @@
# 21.06.24

import os
import inspect


def get_call_stack():
    """
    Retrieves the current call stack with details about each call.

    This function inspects the current call stack and returns a list of dictionaries,
    where each dictionary contains details about a function call in the stack.

    Returns:
        list: A list of dictionaries, each containing the following keys:
            - function (str): The name of the function.
            - folder (str): The directory path of the script containing the function.
            - script (str): The name of the script file containing the function.
            - line (int): The line number in the script where the function is defined.

    Example:
        >>> def func_a():
        ...     return func_b()
        ...
        >>> def func_b():
        ...     return func_c()
        ...
        >>> def func_c():
        ...     return get_call_stack()
        ...
        >>> stack_trace = func_a()
        >>> for frame in stack_trace:
        ...     print(f"Function: {frame['function']}, Folder: {frame['folder']}, "
        ...           f"Script: {frame['script']}, Line: {frame['line']}")
    """
    stack = inspect.stack()
    call_stack = []

    for frame_info in stack:
        function_name = frame_info.function
        filename = frame_info.filename
        lineno = frame_info.lineno
        folder_name = os.path.dirname(filename)
        script_name = os.path.basename(filename)

        call_stack.append({
            "function": function_name,
            "folder": folder_name,
            "script": script_name,
            "line": lineno
        })

    return call_stack
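In this commit the helper is used by the segment downloader to work out which site's config applies: the folder of the outermost frame is treated as the SITE key. A minimal sketch of that lookup; the example path is illustrative, and the mapping only holds when the calling script really lives in a folder named after the site:

import os
from Src.Util.call_stack import get_call_stack

frames = get_call_stack()

# Outermost caller; its containing folder doubles as the SITE key in config.json,
# e.g. ".../Src/Api/Streamingcommunity/..." -> "streamingcommunity"
config_site = str(os.path.basename(frames[-1]['folder'])).lower()
print(config_site)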
Test/bandwidth_gui.py  (83 lines, new file)
@@ -0,0 +1,83 @@
import tkinter as tk
from threading import Thread, Lock
from collections import deque
import psutil
import time


class NetworkMonitor:
    def __init__(self, maxlen=10):
        self.speeds = deque(maxlen=maxlen)
        self.lock = Lock()

    def capture_speed(self, interval: float = 0.5):
        def get_network_io():
            io_counters = psutil.net_io_counters()
            return io_counters

        def format_bytes(bytes):
            if bytes < 1024:
                return f"{bytes:.2f} Bytes/s"
            elif bytes < 1024 * 1024:
                return f"{bytes / 1024:.2f} KB/s"
            else:
                return f"{bytes / (1024 * 1024):.2f} MB/s"

        old_value = get_network_io()
        while True:
            time.sleep(interval)
            new_value = get_network_io()

            with self.lock:
                upload_speed = (new_value.bytes_sent - old_value.bytes_sent) / interval
                download_speed = (new_value.bytes_recv - old_value.bytes_recv) / interval

                self.speeds.append({
                    "upload": format_bytes(upload_speed),
                    "download": format_bytes(download_speed)
                })

            old_value = new_value


class NetworkMonitorApp:
    def __init__(self, root):
        self.monitor = NetworkMonitor()
        self.root = root
        self.root.title("Network Bandwidth Monitor")
        self.root.geometry("400x200")
        self.root.resizable(False, False)

        self.label_upload_header = tk.Label(text="Upload Speed:", font="Quicksand 12 bold")
        self.label_upload_header.pack()

        self.label_upload = tk.Label(text="Calculating...", font="Quicksand 12")
        self.label_upload.pack()

        self.label_download_header = tk.Label(text="Download Speed:", font="Quicksand 12 bold")
        self.label_download_header.pack()

        self.label_download = tk.Label(text="Calculating...", font="Quicksand 12")
        self.label_download.pack()

        self.attribution = tk.Label(text="\n~ WaterrMalann ~", font="Quicksand 11 italic")
        self.attribution.pack()

        self.update_gui()
        self.start_monitoring()

    def update_gui(self):
        with self.monitor.lock:
            if self.monitor.speeds:
                latest_speeds = self.monitor.speeds[-1]
                self.label_upload.config(text=latest_speeds["upload"])
                self.label_download.config(text=latest_speeds["download"])

        self.root.after(250, self.update_gui)  # Update every 0.25 seconds

    def start_monitoring(self):
        self.monitor_thread = Thread(target=self.monitor.capture_speed, args=(0.5,), daemon=True)
        self.monitor_thread.start()


if __name__ == "__main__":
    root = tk.Tk()
    app = NetworkMonitorApp(root)
    root.mainloop()
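The NetworkMonitor class can also be exercised without the tkinter front end. A small headless sketch, assuming the file is importable as Test.bandwidth_gui (it ships without a package marker, so the import path is an assumption):

import time
from threading import Thread

from Test.bandwidth_gui import NetworkMonitor   # assumed import path

monitor = NetworkMonitor(maxlen=10)
Thread(target=monitor.capture_speed, args=(0.5,), daemon=True).start()

# Print the most recent sample once per second for a few seconds
for _ in range(5):
    time.sleep(1)
    with monitor.lock:
        if monitor.speeds:
            latest = monitor.speeds[-1]
            print(f"up: {latest['upload']}  down: {latest['download']}")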
Deleted TLD word list (401 lines, one TLD per line in the original file):
@@ -1,401 +0,0 @@
ads africa analytics apartments app arab are art auction audio author auto autoinsurance autos
band banque bargains baseball bcn beauty best bet bid bike bingo black blackfriday blog boats boo
book booking bot boutique box broadway broker builders business buy buzz cab cafe call camera camp
cancerresearch car cards care careers carinsurance cars casa cash cashbackbonus catering center
channel chat cheap christmas church circle claims cleaning click clothing cloud club codes coffee
college community company compare computer condos connectors construction consulting contact
contractors cool corp country coupon coupons cpa cricket cruises dad dance data dating day dds deal
deals delivery democrat desi design dev diamonds diet digital directory docs dog domains dot
download earth eat email energy enterprises epost esq est estate events exchange expert exposed
faith family fan fans farm fashion feedback film final fish fishing fit fitness flights florist
flowers fly foo food football forsale forum foundation free fun fund furniture futbol fyi gallery
game games garden gay gift gifts gives giving glass global golf got graphics group guide guitars
guru hair haus help here hiphop hockey holdings holiday home homes hosting hot house how imamat
immo inc industries ing institute international irish jewelry jot joy ketchup kim kitchen kosher
krd land lat latino law lease legal lgbt life lifeinsurance lighting like limited limo link live
living llc llp loans lol lotto love ltd ltda luxury mail maison management map market marketing mba
media meet meme memorial menu mobile moi mom money mormon moto mov movie movistar network new news
ninja now nowruz one onl online ott page partners parts party pay pet pets phd phone photo
photography photos pics pictures pid pin pink pizza place play plumbing poker productions prof
promo properties property pub qpon racing radio read realestate realty recipes red rehab rent
rentals repair report republican rest restaurant review reviews rich rocks room rsvp ruhr run safe
salon save scholarships school science search secure services shoes shop shopping show singles site
ski smile soccer social software solar solutions spa spot srl storage store stream studio style
supplies supply support surgery systems talk tattoo taxi team tech technology tennis theater
tickets tienda tips tires today tools top tour toys trading training translations trust tube uno
vacations ventures vet video villas vin vip vision vivo voyage wanggou watch watches web webcam
webs wed whoswho win wine winners works world wow xin xyz yoga you yun zero zip to
config.json  (32 lines)
@@ -7,6 +7,7 @@
"clean_console": true,
"root_path": "Video",
"map_episode_name": "%(tv_name)_S%(season)E%(episode)_%(episode_name)",
"auto_update_domain": true,
"not_close": false
},
"REQUESTS": {
@@ -21,7 +22,6 @@
"proxy": []
},
"M3U8_DOWNLOAD": {
"tdqm_workers": 2,
"tqdm_delay": 0.01,
"tqdm_use_large_bar": true,
"download_video": true,
@@ -52,10 +52,30 @@
"force_resolution": -1
},
"SITE": {
"streamingcommunity": "boston",
"animeunity": "to",
"altadefinizione": "vodka",
"guardaserie": "ceo",
"ddlstreamitaly": "co"
"streamingcommunity": {
"video_workers": 4,
"audio_workers": 2,
"domain": "boston"
},
"animeunity": {
"video_workers": 4,
"audio_workers": 2,
"domain": "to"
},
"altadefinizione": {
"video_workers": -1,
"audio_workers": -1,
"domain": "vodka"
},
"guardaserie": {
"video_workers": -1,
"audio_workers": -1,
"domain": "ceo"
},
"ddlstreamitaly": {
"video_workers": -1,
"audio_workers": -1,
"domain": "co"
}
}
}
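Every SITE entry now carries the three keys that the downloader and the domain search rely on. A hypothetical validation helper (not part of the repository) that spells out the expected shape:

REQUIRED_SITE_KEYS = {"video_workers", "audio_workers", "domain"}

def check_site_entry(name: str, entry: dict) -> None:
    # Hypothetical check: each per-site block must provide all three keys;
    # -1 for a worker count means "use os.cpu_count() workers".
    missing = REQUIRED_SITE_KEYS - entry.keys()
    if missing:
        raise ValueError(f"SITE.{name} is missing keys: {sorted(missing)}")

check_site_entry("streamingcommunity", {"video_workers": 4, "audio_workers": 2, "domain": "boston"})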