Try optimize HLS

Lovi 2025-02-02 19:06:05 +01:00
parent 683008029e
commit e797ee567d
15 changed files with 209 additions and 527 deletions

View File

@@ -65,9 +65,6 @@ def download_film(select_title: MediaItem) -> str:
     if msg.ask("[green]Do you want to continue [white]([red]y[white])[green] or return at home[white]([red]n[white]) ", choices=['y', 'n'], default='y', show_choices=True) == "n":
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
     if r_proc != None:
         console.print("[green]Result: ")

View File

@@ -73,12 +73,6 @@ def download_episode(index_select: int, scrape_serie: ScrapeSerieAnime, video_so
         path=os.path.join(mp4_path, title_name)
     )
-    # If download fails do not create the file
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
-        return "",True
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -65,10 +65,6 @@ def download_film(select_title: MediaItem) -> str:
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -72,11 +72,6 @@ def download_video(index_episode_selected: int, scape_info_serie: GetSerieInfo,
         referer=f"{parsed_url.scheme}://{parsed_url.netloc}/",
     )
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
-        return "",True
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -70,13 +70,6 @@ def download_video(index_season_selected: int, index_episode_selected: int, scap
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    # Removes file not completed and stops other downloads
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, mp4_name)):
-            os.remove(os.path.join(mp4_path, mp4_name))
-        return "",True
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -94,10 +94,6 @@ def download_film(movie_details: Json_film) -> str:
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -69,10 +69,6 @@ def download_film(select_title: MediaItem) -> str:
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, title_name)):
-            os.remove(os.path.join(mp4_path, title_name))
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)

View File

@@ -71,12 +71,6 @@ def download_video(index_season_selected: int, index_episode_selected: int, scra
         frames = get_call_stack()
         execute_search(frames[-4])"""
-    if r_proc == None:
-        if os.path.exists(os.path.join(mp4_path, mp4_name)):
-            os.remove(os.path.join(mp4_path, mp4_name))
-        return "",True
     if r_proc != None:
         console.print("[green]Result: ")
         console.print(r_proc)
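The same guard recurs in every site module above: when the HLS downloader returns no result, the partially written MP4 is removed and the caller aborts. A minimal sketch of that pattern (the helper name is hypothetical, not part of the codebase):

    import os

    def cleanup_partial_download(r_proc, mp4_path: str, title_name: str) -> bool:
        # Mirrors the guard these hunks remove: drop the partial file when
        # the downloader returned nothing, and signal the caller to abort.
        if r_proc is None:
            partial = os.path.join(mp4_path, title_name)
            if os.path.exists(partial):
                os.remove(partial)
            return True
        return False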

View File

@@ -39,10 +39,7 @@ from .segments import M3U8_Segments
 # Config
 DOWNLOAD_SPECIFIC_AUDIO = config_manager.get_list('M3U8_DOWNLOAD', 'specific_list_audio')
 DOWNLOAD_SPECIFIC_SUBTITLE = config_manager.get_list('M3U8_DOWNLOAD', 'specific_list_subtitles')
-DOWNLOAD_VIDEO = config_manager.get_bool('M3U8_DOWNLOAD', 'download_video')
-DOWNLOAD_AUDIO = config_manager.get_bool('M3U8_DOWNLOAD', 'download_audio')
 MERGE_AUDIO = config_manager.get_bool('M3U8_DOWNLOAD', 'merge_audio')
-DOWNLOAD_SUBTITLE = config_manager.get_bool('M3U8_DOWNLOAD', 'download_sub')
 MERGE_SUBTITLE = config_manager.get_bool('M3U8_DOWNLOAD', 'merge_subs')
 REMOVE_SEGMENTS_FOLDER = config_manager.get_bool('M3U8_DOWNLOAD', 'cleanup_tmp_folder')
 FILTER_CUSTOM_REOLUTION = config_manager.get_int('M3U8_PARSER', 'force_resolution')
@@ -57,6 +54,29 @@ list_MissingTs = []
+class HttpClient:
+    def __init__(self, headers: dict = None):
+        self.headers = headers or {'User-Agent': get_headers()}
+        self.client = httpx.Client(headers=self.headers, timeout=max_timeout, follow_redirects=True)
+
+    def _make_request(self, url: str, return_content: bool = False):
+        for attempt in range(RETRY_LIMIT):
+            try:
+                response = self.client.get(url)
+                response.raise_for_status()
+                return response.content if return_content else response.text
+            except Exception as e:
+                logging.error(f"Attempt {attempt+1} failed for {url}: {str(e)}")
+                time.sleep(1.5 ** attempt)
+        return None
+
+    def get(self, url: str) -> str:
+        return self._make_request(url)
+
+    def get_content(self, url: str) -> bytes:
+        return self._make_request(url, return_content=True)
+
 class PathManager:
     def __init__(self, output_filename):
         """
@@ -88,66 +108,7 @@ class PathManager:
         os.makedirs(self.subtitle_segments_path, exist_ok=True)

-class HttpClient:
-    def __init__(self, headers: str = None):
-        """
-        Initializes the HttpClient with specified headers.
-        """
-        self.headers = headers
-
-    def get(self, url: str):
-        """
-        Sends a GET request to the specified URL and returns the response as text.
-
-        Returns:
-            str: The response body as text if the request is successful, None otherwise.
-        """
-        logging.info(f"class 'HttpClient'; make request: {url}")
-
-        try:
-            response = httpx.get(
-                url=url,
-                headers=self.headers,
-                timeout=max_timeout,
-                follow_redirects=True
-            )
-
-            response.raise_for_status()
-            return response.text
-
-        except Exception as e:
-            console.print(f"Request to {url} failed with error: {e}")
-            return 404
-
-    def get_content(self, url):
-        """
-        Sends a GET request to the specified URL and returns the raw response content.
-
-        Returns:
-            bytes: The response content as bytes if the request is successful, None otherwise.
-        """
-        logging.info(f"class 'HttpClient'; make request: {url}")
-
-        try:
-            response = httpx.get(
-                url=url,
-                headers=self.headers,
-                timeout=max_timeout
-            )
-
-            response.raise_for_status()
-            return response.content  # Return the raw response content
-
-        except Exception as e:
-            logging.error(f"Request to {url} failed: {response.status_code} when get content.")
-            return None
-
 class ContentExtractor:
-    def __init__(self):
-        """
-        This class is responsible for extracting audio, subtitle, and video information from an M3U8 playlist.
-        """
-        pass
-
     def start(self, obj_parse: M3U8_Parser):
         """
         Starts the extraction process by parsing the M3U8 playlist and collecting audio, subtitle, and video data.
@@ -155,10 +116,7 @@ class ContentExtractor:
         Args:
             obj_parse (str): The M3U8_Parser obj of the M3U8 playlist.
         """
         self.obj_parse = obj_parse
-
-        # Collect audio, subtitle, and video information
         self._collect_audio()
         self._collect_subtitle()
         self._collect_video()
@@ -417,8 +375,6 @@ class ContentDownloader:
             info_dw = video_m3u8.download_streams(f"{Colors.MAGENTA}video", "video")
             list_MissingTs.append(info_dw)
             self.stopped=list_MissingTs.pop()
-
-            # Print duration information of the downloaded video
-            #print_duration_table(downloaded_video[0].get('path'))
         else:
             console.log("[cyan]Video [red]already exists.")
@@ -448,8 +404,6 @@ class ContentDownloader:
             info_dw = audio_m3u8.download_streams(f"{Colors.MAGENTA}audio {Colors.RED}{obj_audio.get('language')}", f"audio_{obj_audio.get('language')}")
             list_MissingTs.append(info_dw)
             self.stopped=list_MissingTs.pop()
-
-            # Print duration information of the downloaded audio
-            #print_duration_table(obj_audio.get('path'))
         else:
             console.log(f"[cyan]Audio [white]([green]{obj_audio.get('language')}[white]) [red]already exists.")
@@ -822,8 +776,6 @@ class HLS_Downloader:
             return None
         else:
-            if self.stopped:
-                return self.stopped
             return {
                 'path': self.output_filename,
                 'url': self.m3u8_playlist,
@@ -851,8 +803,6 @@ class HLS_Downloader:
             return None
         else:
-            if self.stopped:
-                return None
             return {
                 'path': self.output_filename,
                 'url': self.m3u8_index,
@@ -978,11 +928,11 @@ class HLS_Downloader:
         self.download_tracker.add_subtitle(self.content_extractor.list_available_subtitles)

         # Download each type of content
-        if DOWNLOAD_VIDEO and len(self.download_tracker.downloaded_video) > 0:
+        if len(self.download_tracker.downloaded_video) > 0:
             self.content_downloader.download_video(self.download_tracker.downloaded_video)
-        if DOWNLOAD_AUDIO and len(self.download_tracker.downloaded_audio) > 0:
+        if len(self.download_tracker.downloaded_audio) > 0:
             self.content_downloader.download_audio(self.download_tracker.downloaded_audio)
-        if DOWNLOAD_SUBTITLE and len(self.download_tracker.downloaded_subtitle) > 0:
+        if len(self.download_tracker.downloaded_subtitle) > 0:
             self.content_downloader.download_subtitle(self.download_tracker.downloaded_subtitle)

         # Join downloaded content
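The new module-level HttpClient above replaces per-call httpx.get with a shared client plus a retry loop and exponential backoff, sleeping 1.5 ** attempt seconds between failed attempts. A self-contained sketch of that strategy, assuming RETRY_LIMIT and the timeout are supplied by the caller rather than read from config_manager:

    import time
    import logging

    import httpx

    RETRY_LIMIT = 3  # assumption: stands in for the module-level constant

    def fetch_with_backoff(url: str, timeout: float = 10.0):
        # Delays between failed attempts: 1.5**0 = 1.0s, 1.5**1 = 1.5s, 1.5**2 = 2.25s.
        with httpx.Client(timeout=timeout, follow_redirects=True) as client:
            for attempt in range(RETRY_LIMIT):
                try:
                    response = client.get(url)
                    response.raise_for_status()
                    return response.content
                except Exception as e:
                    logging.error(f"Attempt {attempt + 1} failed for {url}: {e}")
                    time.sleep(1.5 ** attempt)
        return None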

View File

@@ -8,10 +8,10 @@ import signal
 import logging
 import binascii
 import threading
 from queue import PriorityQueue
 from urllib.parse import urljoin, urlparse
 from concurrent.futures import ThreadPoolExecutor, as_completed
+from typing import Dict

 # External libraries
@@ -20,12 +20,11 @@ from tqdm import tqdm
 # Internal utilities
+from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.console import console
 from StreamingCommunity.Util.headers import get_headers, random_headers
-from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util._jsonConfig import config_manager
 from StreamingCommunity.Util.os import os_manager
-from StreamingCommunity.Util.call_stack import get_call_stack

 # Logic class
@@ -35,27 +34,19 @@ from ...M3U8 import (
     M3U8_Parser,
     M3U8_UrlFix
 )
-from ...FFmpeg.util import print_duration_table, format_duration
 from .proxyes import main_test_proxy

 # Config
 TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
 REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
 REQUEST_VERIFY = False
 THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
 PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
 PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
 DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
 DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
+MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
-
-# Variable
-max_timeout = config_manager.get_int("REQUESTS", "timeout")
@@ -73,8 +64,6 @@ class M3U8_Segments:
         self.tmp_folder = tmp_folder
         self.is_index_url = is_index_url
         self.expected_real_time = None
-        self.max_timeout = max_timeout
-
         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
         os.makedirs(self.tmp_folder, exist_ok=True)
@@ -87,8 +76,8 @@ class M3U8_Segments:
         self.queue = PriorityQueue()
         self.stop_event = threading.Event()
         self.downloaded_segments = set()
-        self.base_timeout = 1.0
-        self.current_timeout = 5.0
+        self.base_timeout = 0.5
+        self.current_timeout = 3.0

         # Stopping
         self.interrupt_flag = threading.Event()
@@ -100,86 +89,41 @@ class M3U8_Segments:
         self.info_nFailed = 0

     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
-        """
-        Retrieves the encryption key from the M3U8 playlist.
-
-        Parameters:
-            - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
-
-        Returns:
-            bytes: The encryption key in bytes.
-        """
-
-        # Construct the full URL of the key
         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
         parsed_url = urlparse(key_uri)
         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-        logging.info(f"Uri key: {key_uri}")

-        # Make request to get porxy
         try:
-            response = httpx.get(
-                url=key_uri,
-                headers={'User-Agent': get_headers()},
-                timeout=max_timeout
-            )
+            client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
+            response = httpx.get(url=key_uri, **client_params)
             response.raise_for_status()
+            hex_content = binascii.hexlify(response.content).decode('utf-8')
+            return bytes.fromhex(hex_content)

         except Exception as e:
-            raise Exception(f"Failed to fetch key from {key_uri}: {e}")
-
-        # Convert the content of the response to hexadecimal and then to bytes
-        hex_content = binascii.hexlify(response.content).decode('utf-8')
-        byte_content = bytes.fromhex(hex_content)
-
-        logging.info(f"URI: Hex content: {hex_content}, Byte content: {byte_content}")
-        #console.print(f"[cyan]Find key: [red]{hex_content}")
-        return byte_content
+            raise Exception(f"Failed to fetch key: {e}")

     def parse_data(self, m3u8_content: str) -> None:
-        """
-        Parses the M3U8 content to extract segment information.
-
-        Parameters:
-            - m3u8_content (str): The content of the M3U8 file.
-        """
         m3u8_parser = M3U8_Parser()
         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)

-        self.expected_real_time = m3u8_parser.get_duration(return_string=False)
         self.expected_real_time_s = m3u8_parser.duration

-        # Check if there is an encryption key in the playlis
-        if m3u8_parser.keys is not None:
-            try:
-
-                # Extract byte from the key
-                key = self.__get_key__(m3u8_parser)
-
-            except Exception as e:
-                raise Exception(f"Failed to retrieve encryption key {e}.")
-
-            iv = m3u8_parser.keys.get('iv')
-            method = m3u8_parser.keys.get('method')
-            logging.info(f"M3U8_Decryption - IV: {iv}, method: {method}")
-
-            # Create a decryption object with the key and set the method
-            self.decryption = M3U8_Decryption(key, iv, method)
-
-        # Store the segment information parsed from the playlist
-        self.segments = m3u8_parser.segments
-
-        # Fix URL if it is incomplete (missing 'http')
-        for i in range(len(self.segments)):
-            segment_url = self.segments[i]
-
-            if "http" not in segment_url:
-                self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
-                logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
+        if m3u8_parser.keys:
+            key = self.__get_key__(m3u8_parser)
+            self.decryption = M3U8_Decryption(
+                key,
+                m3u8_parser.keys.get('iv'),
+                m3u8_parser.keys.get('method')
+            )
+
+        self.segments = [
+            self.class_url_fixer.generate_full_url(seg)
+            if "http" not in seg else seg
+            for seg in m3u8_parser.segments
+        ]

-        # Update segments for estimator
         self.class_ts_estimator.total_segments = len(self.segments)
-        logging.info(f"Segmnets to download: [{len(self.segments)}]")

         # Proxy
         if THERE_IS_PROXY_LIST:
@@ -191,35 +135,18 @@ class M3U8_Segments:
                 sys.exit(0)

     def get_info(self) -> None:
-        """
-        Makes a request to the index M3U8 file to get information about segments.
-        """
         if self.is_index_url:
             try:
-                # Send a GET request to retrieve the index M3U8 file
-                response = httpx.get(
-                    self.url,
-                    headers={'User-Agent': get_headers()},
-                    timeout=max_timeout,
-                    follow_redirects=True
-                )
+                client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
+                response = httpx.get(self.url, **client_params)
                 response.raise_for_status()
-
-                # Save the M3U8 file to the temporary folder
-                path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
-                open(path_m3u8_file, "w+").write(response.text)
-
-                # Parse the text from the M3U8 index file
                 self.parse_data(response.text)
+
+                with open(os.path.join(self.tmp_folder, "playlist.m3u8"), "w") as f:
+                    f.write(response.text)

             except Exception as e:
-                print(f"Error during M3U8 index request: {e}")
+                raise RuntimeError(f"M3U8 info retrieval failed: {e}")
         else:
-            # Parser data of content of index pass in input to class
             self.parse_data(self.url)

     def setup_interrupt_handler(self):
         """
@@ -237,7 +164,19 @@ class M3U8_Segments:
         else:
             print("Signal handler must be set in the main thread")

-    def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
+    def _get_http_client(self, index: int = None):
+        client_params = {
+            'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_headers()},
+            'timeout': MAX_TIMEOOUT,
+            'follow_redirects': True
+        }
+
+        if THERE_IS_PROXY_LIST and index is not None and hasattr(self, 'valid_proxy'):
+            client_params['proxies'] = self.valid_proxy[index % len(self.valid_proxy)]
+
+        return httpx.Client(**client_params)
+
+    def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.3) -> None:
         """
         Downloads a TS segment and adds it to the segment queue with retry logic.
@@ -253,84 +192,36 @@ class M3U8_Segments:
                 return

             try:
-                start_time = time.time()
-
-                # Make request to get content
-                if THERE_IS_PROXY_LIST:
-
-                    # Get proxy from list
-                    proxy = self.valid_proxy[index % len(self.valid_proxy)]
-                    logging.info(f"Use proxy: {proxy}")
-
-                    with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
-                        if 'key_base_url' in self.__dict__:
-                            response = client.get(
-                                url=ts_url,
-                                headers=random_headers(self.key_base_url),
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-                        else:
-                            response = client.get(
-                                url=ts_url,
-                                headers={'User-Agent': get_headers()},
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-                else:
-                    with httpx.Client(verify=REQUEST_VERIFY) as client_2:
-                        if 'key_base_url' in self.__dict__:
-                            response = client_2.get(
-                                url=ts_url,
-                                headers=random_headers(self.key_base_url),
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-                        else:
-                            response = client_2.get(
-                                url=ts_url,
-                                headers={'User-Agent': get_headers()},
-                                timeout=max_timeout,
-                                follow_redirects=True
-                            )
-
-                # Validate response and content
-                response.raise_for_status()
-                segment_content = response.content
-                content_size = len(segment_content)
-                duration = time.time() - start_time
-
-                # Decrypt if needed and verify decrypted content
-                if self.decryption is not None:
-                    try:
-                        segment_content = self.decryption.decrypt(segment_content)
-
-                    except Exception as e:
-                        logging.error(f"Decryption failed for segment {index}: {str(e)}")
-                        self.interrupt_flag.set()   # Interrupt the download process
-                        self.stop_event.set()       # Trigger the stopping event for all threads
-                        break                       # Stop the current task immediately
-
-                # Update progress and queue
-                self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
-
-                # Add the segment to the queue
-                self.queue.put((index, segment_content))
-
-                # Track successfully downloaded segments
-                self.downloaded_segments.add(index)
-                progress_bar.update(1)
-
-                # Break out of the loop on success
-                return
+                with self._get_http_client(index) as client:
+                    start_time = time.time()
+                    response = client.get(ts_url)
+
+                    # Validate response and content
+                    response.raise_for_status()
+                    segment_content = response.content
+                    content_size = len(segment_content)
+                    duration = time.time() - start_time
+
+                    # Decrypt if needed and verify decrypted content
+                    if self.decryption is not None:
+                        try:
+                            segment_content = self.decryption.decrypt(segment_content)
+                        except Exception as e:
+                            logging.error(f"Decryption failed for segment {index}: {str(e)}")
+                            self.interrupt_flag.set()   # Interrupt the download process
+                            self.stop_event.set()       # Trigger the stopping event for all threads
+                            break                       # Stop the current task immediately
+
+                    self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
+                    self.queue.put((index, segment_content))
+                    self.downloaded_segments.add(index)
+                    progress_bar.update(1)
+                    return

             except Exception as e:
                 logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")

-                # Update stat variable class
                 if attempt > self.info_maxRetry:
                     self.info_maxRetry = ( attempt + 1 )
                 self.info_nRetry += 1
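The new _get_http_client picks a proxy by taking the segment index modulo the proxy count, so concurrent workers rotate evenly over the entries from list_proxy.txt. A toy illustration of the rotation (proxy URLs are placeholders):

    valid_proxy = ["http://p1:8080", "http://p2:8080", "http://p3:8080"]  # placeholder values
    for index in range(7):
        print(index, "->", valid_proxy[index % len(valid_proxy)])
    # 0 -> p1, 1 -> p2, 2 -> p3, 3 -> p1, ...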
@@ -341,8 +232,6 @@ class M3U8_Segments:
                     progress_bar.update(1)
                     self.info_nFailed += 1

-                    #break
-
                 sleep_time = backoff_factor * (2 ** attempt)
                 logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
                 time.sleep(sleep_time)
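With backoff_factor lowered from 1.5 to 1.3, per-segment retry delays grow as backoff_factor * 2 ** attempt; a quick check of the resulting schedule:

    backoff_factor = 1.3
    for attempt in range(5):
        print(f"attempt {attempt}: sleep {backoff_factor * (2 ** attempt):.1f}s")
    # attempt 0: 1.3s, 1: 2.6s, 2: 5.2s, 3: 10.4s, 4: 20.8s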
@@ -353,7 +242,6 @@ class M3U8_Segments:
         """
         buffer = {}
         expected_index = 0
-        segments_written = set()

         with open(self.tmp_file_path, 'wb') as f:
             while not self.stop_event.is_set() or not self.queue.empty():
@@ -375,7 +263,6 @@ class M3U8_Segments:
                     # Write segment if it's the next expected one
                     if index == expected_index:
                         f.write(segment_content)
-                        segments_written.add(index)
                         f.flush()
                         expected_index += 1
@@ -385,7 +272,6 @@ class M3U8_Segments:
                         if next_segment is not None:
                             f.write(next_segment)
-                            segments_written.add(expected_index)
                             f.flush()
                             expected_index += 1
@@ -394,8 +280,7 @@ class M3U8_Segments:
                         buffer[index] = segment_content

                 except queue.Empty:
-                    self.current_timeout = min(self.max_timeout, self.current_timeout * 1.25)
+                    self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.25)

                     if self.stop_event.is_set():
                         break
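The writer thread now starts from a 3.0s queue timeout and grows it by 25% per empty poll, capped at MAX_TIMEOOUT. Assuming the 25s request timeout this commit sets in config.json, the wait grows like this:

    MAX_TIMEOOUT = 25  # assumption: REQUESTS.timeout from config.json
    current_timeout = 3.0
    for poll in range(8):
        print(f"empty poll {poll}: next wait {current_timeout:.2f}s")
        current_timeout = min(MAX_TIMEOOUT, current_timeout * 1.25)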
@@ -412,84 +297,34 @@ class M3U8_Segments:
         """
         self.setup_interrupt_handler()

-        # Get config site from prev stack
-        frames = get_call_stack()
-        logging.info(f"Extract info from: {frames}")
-        config_site = str(frames[-4]['folder_base'])
-        logging.info(f"Use frame: {frames[-1]}")
-
-        # Workers to use for downloading
-        TQDM_MAX_WORKER = 0
-
-        # Select audio workers from folder of frames stack prev call.
-        try:
-            VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
-        except:
-            #VIDEO_WORKERS = os.cpu_count()
-            VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
-
-        try:
-            AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
-        except:
-            #AUDIO_WORKERS = os.cpu_count()
-            AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
-
-        # Differnt workers for audio and video
-        if "video" in str(type):
-            TQDM_MAX_WORKER = VIDEO_WORKERS
-        if "audio" in str(type):
-            TQDM_MAX_WORKER = AUDIO_WORKERS
-        #console.print(f"[cyan]Video workers[white]: [green]{VIDEO_WORKERS} [white]| [cyan]Audio workers[white]: [green]{AUDIO_WORKERS}")
-
-        # Custom bar for mobile and pc
-        if TQDM_USE_LARGE_BAR:
-            bar_format = (
-                f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
-                f"{Colors.RED}{{percentage:.2f}}% "
-                f"{Colors.MAGENTA}{{bar}} "
-                f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
-                f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-            )
-        else:
-            bar_format = (
-                f"{Colors.YELLOW}Proc{Colors.WHITE}: "
-                f"{Colors.RED}{{percentage:.2f}}% "
-                f"{Colors.WHITE}| "
-                f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-            )
-
-        # Create progress bar
         progress_bar = tqdm(
             total=len(self.segments),
             unit='s',
             ascii='░▒█',
-            bar_format=bar_format,
+            bar_format=self._get_bar_format(description),
             mininterval=0.05
         )

         try:
-            # Start writer thread
             writer_thread = threading.Thread(target=self.write_segments_to_file)
             writer_thread.daemon = True
             writer_thread.start()

             # Configure workers and delay
-            max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
+            max_workers = self._get_worker_count(type)
             delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER

             # Download segments with completion verification
             with ThreadPoolExecutor(max_workers=max_workers) as executor:
                 futures = []
                 for index, segment_url in enumerate(self.segments):

                     # Check for interrupt before submitting each task
                     if self.interrupt_flag.is_set():
                         break

                     time.sleep(delay)
-                    futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
+                    futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))

                 # Wait for futures with interrupt handling
                 for future in as_completed(futures):
@@ -515,61 +350,87 @@ class M3U8_Segments:
                             break

                         try:
-                            self.make_requests_stream(self.segments[index], index, progress_bar)
+                            self.download_segment(self.segments[index], index, progress_bar)
                         except Exception as e:
                             logging.error(f"Failed to retry segment {index}: {str(e)}")

+        except Exception as e:
+            logging.error(f"Download failed: {str(e)}")
+            raise
+
         finally:
-            # Clean up resources
-            self.stop_event.set()
-            writer_thread.join(timeout=30)
-            progress_bar.close()
-
-            # Check if download was interrupted
-            if self.download_interrupted:
-                console.log("[red] Download was manually stopped.")
-
-            # Clean up
-            self.stop_event.set()
-            writer_thread.join(timeout=30)
-            progress_bar.close()
-
-            # Final verification
-            try:
-                final_completion = (len(self.downloaded_segments) / total_segments) * 100
-                if final_completion < 99.9:  # Less than 99.9% complete
-                    missing = set(range(total_segments)) - self.downloaded_segments
-                    raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
-            except:
-                pass
-
-            # Verify output file
-            if not os.path.exists(self.tmp_file_path):
-                raise Exception("Output file missing")
-
-            file_size = os.path.getsize(self.tmp_file_path)
-            if file_size == 0:
-                raise Exception("Output file is empty")
-
-            # Display additional info when there is missing stream file
-            if self.info_nFailed > 0:
-
-                # Get expected time
-                ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
-                ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
-
-                console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
-
-                if self.info_nRetry >= len(self.segments) * 0.3:
-                    console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")
-
-            # Info to return
-            if self.download_interrupted:
-                return {'type': type, 'nFailed': self.info_nFailed, 'stopped': bool(True)}
-            return {'type': type, 'nFailed': self.info_nFailed, 'stopped': bool(False)}
+            self._cleanup_resources(writer_thread, progress_bar)
+
+        if not self.interrupt_flag.is_set():
+            self._verify_download_completion()
+
+        return self._generate_results(type)
+
+    def _get_bar_format(self, description: str) -> str:
+        """
+        Generate platform-appropriate progress bar format.
+        """
+        if not TQDM_USE_LARGE_BAR:
+            return (
+                f"{Colors.YELLOW}Proc{Colors.WHITE}: "
+                f"{Colors.RED}{{percentage:.2f}}% "
+                f"{Colors.WHITE}| "
+                f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
+            )
+        else:
+            return (
+                f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
+                f"{Colors.RED}{{percentage:.2f}}% "
+                f"{Colors.MAGENTA}{{bar}} "
+                f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
+                f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
+            )
+
+    def _get_worker_count(self, stream_type: str) -> int:
+        """
+        Calculate optimal parallel workers based on stream type and infrastructure.
+        """
+        base_workers = {
+            'video': DEFAULT_VIDEO_WORKERS,
+            'audio': DEFAULT_AUDIO_WORKERS
+        }.get(stream_type.lower(), 1)
+
+        if THERE_IS_PROXY_LIST:
+            return min(len(self.valid_proxy), base_workers * 2)
+
+        return base_workers
+
+    def _generate_results(self, stream_type: str) -> Dict:
+        """Package final download results."""
+        return {
+            'type': stream_type,
+            'nFailed': self.info_nFailed,
+            'stopped': self.download_interrupted
+        }
+
+    def _verify_download_completion(self) -> None:
+        """Validate final download integrity."""
+        total = len(self.segments)
+        if len(self.downloaded_segments) / total < 0.999:
+            missing = sorted(set(range(total)) - self.downloaded_segments)
+            raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
+
+    def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
+        """Ensure resource cleanup and final reporting."""
+        self.stop_event.set()
+        writer_thread.join(timeout=30)
+        progress_bar.close()
+
+        if self.download_interrupted:
+            console.print("\n[red]Download terminated by user")
+
+        if self.info_nFailed > 0:
+            self._display_error_summary()
+
+    def _display_error_summary(self) -> None:
+        """Generate final error report."""
+        console.print(f"\n[cyan]Retry Summary: "
+                      f"[white]Max retries: [green]{self.info_maxRetry} "
+                      f"[white]Total retries: [green]{self.info_nRetry} "
+                      f"[white]Failed segments: [red]{self.info_nFailed}")
+
+        if self.info_nRetry > len(self.segments) * 0.3:
+            console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")

View File

@@ -32,7 +32,7 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 # Config
 GET_ONLY_LINK = config_manager.get_bool('M3U8_PARSER', 'get_only_link')
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
 REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')

 #Ending constant
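Several modules now derive TQDM_USE_LARGE_BAR from the runtime platform instead of the removed tqdm_use_large_bar config key. The check in isolation:

    import sys

    # sys.platform is e.g. "linux", "win32", "darwin", or "android" (Python 3.13+).
    TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
    print(sys.platform, "->", "large bar" if TQDM_USE_LARGE_BAR else "compact bar")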

View File

@@ -28,7 +28,7 @@ USERNAME = str(config_manager.get_dict('DEFAULT', 'config_qbit_tor')['user'])
 PASSWORD = str(config_manager.get_dict('DEFAULT', 'config_qbit_tor')['pass'])

 # Config
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
 REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')

View File

@@ -31,7 +31,7 @@ FFMPEG_DEFAULT_PRESET = config_manager.get("M3U8_CONVERSION", "default_preset")
 # Variable
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
 FFMPEG_PATH = os_summary.ffmpeg_path

View File

@@ -1,6 +1,6 @@
 # 21.04.25
-import os
+import sys
 import time
 import logging
 import threading
@@ -15,11 +15,10 @@ from tqdm import tqdm
 # Internal utilities
 from StreamingCommunity.Util.color import Colors
 from StreamingCommunity.Util.os import internet_manager
-from StreamingCommunity.Util._jsonConfig import config_manager

 # Variable
-TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+TQDM_USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)

 class M3U8_Ts_Estimator:
@@ -35,13 +34,10 @@ class M3U8_Ts_Estimator:
         self.total_segments = total_segments
         self.lock = threading.Lock()
         self.speed = {"upload": "N/A", "download": "N/A"}
-        self.process_pid = os.getpid()  # Get current process PID
-        logging.debug(f"Initializing M3U8_Ts_Estimator with PID: {self.process_pid}")

-        # Start the speed capture thread if TQDM_USE_LARGE_BAR is True
         if TQDM_USE_LARGE_BAR:
             logging.debug("TQDM_USE_LARGE_BAR is True, starting speed capture thread")
-            self.speed_thread = threading.Thread(target=self.capture_speed, args=(1, self.process_pid))
+            self.speed_thread = threading.Thread(target=self.capture_speed)
             self.speed_thread.daemon = True
             self.speed_thread.start()
@@ -50,8 +46,6 @@ class M3U8_Ts_Estimator:
     def add_ts_file(self, size: int, size_download: int, duration: float):
         """Add a file size to the list of file sizes."""
-        logging.debug(f"Adding ts file - size: {size}, download size: {size_download}, duration: {duration}")
-
         if size <= 0 or size_download <= 0 or duration <= 0:
             logging.error(f"Invalid input values: size={size}, size_download={size_download}, duration={duration}")
             return
@@ -60,95 +54,36 @@ class M3U8_Ts_Estimator:
         self.now_downloaded_size += size_download
         logging.debug(f"Current total downloaded size: {self.now_downloaded_size}")

-    def capture_speed(self, interval: float = 1, pid: int = None):
+    def capture_speed(self, interval: float = 1):
         """Capture the internet speed periodically."""
-        logging.debug(f"Starting speed capture with interval {interval}s for PID: {pid}")
-
-        def get_network_io(process=None):
-            try:
-                if process:
-                    # For process-specific monitoring
-                    connections = process.connections(kind='inet')
-                    if connections:
-                        io_counters = process.io_counters()
-                        logging.debug(f"Process IO counters: {io_counters}")
-                        return io_counters
-                    else:
-                        logging.debug("No active internet connections found for process")
-                        return None
-                else:
-                    # For system-wide monitoring
-                    io_counters = psutil.net_io_counters()
-                    logging.debug(f"System IO counters: {io_counters}")
-                    return io_counters
-            except Exception as e:
-                logging.error(f"Error getting network IO: {str(e)}")
-                return None
-
-        try:
-            process = psutil.Process(pid) if pid else None
-            logging.debug(f"Monitoring process: {process}")
-        except Exception as e:
-            logging.error(f"Failed to get process with PID {pid}: {str(e)}")
-            process = None
-
-        last_upload = None
-        last_download = None
-        first_run = True
-
-        # Circular buffer for the last N measurements
-        speed_buffer_size = 3
-        speed_buffer = deque(maxlen=speed_buffer_size)
+        last_upload, last_download = 0, 0
+        speed_buffer = deque(maxlen=3)

         while True:
             try:
-                io_counters = get_network_io()
+                io_counters = psutil.net_io_counters()
+                if not io_counters:
+                    raise ValueError("No IO counters available")

-                if io_counters:
-                    current_upload = io_counters.bytes_sent
-                    current_download = io_counters.bytes_recv
-
-                    if not first_run and last_upload is not None and last_download is not None:
-
-                        # Compute the instantaneous speed
-                        upload_speed = max(0, (current_upload - last_upload) / interval)
-                        download_speed = max(0, (current_download - last_download) / interval)
-
-                        # Add to the buffer
-                        speed_buffer.append(download_speed)
-
-                        # Compute the moving average of the speeds
-                        if len(speed_buffer) > 0:
-                            avg_download_speed = sum(speed_buffer) / len(speed_buffer)
-
-                            if avg_download_speed > 0:
-                                with self.lock:
-                                    self.speed = {
-                                        "upload": internet_manager.format_transfer_speed(upload_speed),
-                                        "download": internet_manager.format_transfer_speed(avg_download_speed)
-                                    }
-                                    logging.debug(f"Updated speeds - Upload: {self.speed['upload']}, Download: {self.speed['download']}")
-
-                    last_upload = current_upload
-                    last_download = current_download
-                    first_run = False
-
-                time.sleep(interval)
+                current_upload, current_download = io_counters.bytes_sent, io_counters.bytes_recv
+                if last_upload and last_download:
+                    upload_speed = (current_upload - last_upload) / interval
+                    download_speed = (current_download - last_download) / interval
+                    speed_buffer.append(max(0, download_speed))
+
+                    with self.lock:
+                        self.speed = {
+                            "upload": internet_manager.format_transfer_speed(max(0, upload_speed)),
+                            "download": internet_manager.format_transfer_speed(sum(speed_buffer) / len(speed_buffer))
+                        }
+                        logging.debug(f"Updated speeds - Upload: {self.speed['upload']}, Download: {self.speed['download']}")
+
+                last_upload, last_download = current_upload, current_download

             except Exception as e:
-                logging.error(f"Error in speed capture loop: {str(e)}")
-                logging.exception("Full traceback:")
-                logging.sleep(interval)
+                logging.error(f"Error in speed capture: {str(e)}")
+                self.speed = {"upload": "N/A", "download": "N/A"}

-    def get_average_speed(self) -> list:
-        """Calculate the average internet speed."""
-        with self.lock:
-            logging.debug(f"Current speed data: {self.speed}")
-            return self.speed['download'].split(" ")
+            time.sleep(interval)

     def calculate_total_size(self) -> str:
         """
@@ -158,38 +93,20 @@ class M3U8_Ts_Estimator:
             str: The mean size of the files in a human-readable format.
         """
         try:
-            if len(self.ts_file_sizes) == 0:
-                raise ValueError("No file sizes available to calculate total size.")
-
             total_size = sum(self.ts_file_sizes)
             mean_size = total_size / len(self.ts_file_sizes)
-
-            # Return formatted mean size
             return internet_manager.format_file_size(mean_size)

-        except ZeroDivisionError as e:
-            logging.error("Division by zero error occurred: %s", e)
-            return "0B"
-
         except Exception as e:
             logging.error("An unexpected error occurred: %s", e)
             return "Error"

-    def get_downloaded_size(self) -> str:
-        """
-        Get the total downloaded size formatted as a human-readable string.
-
-        Returns:
-            str: The total downloaded size as a human-readable string.
-        """
-        return internet_manager.format_file_size(self.now_downloaded_size)
-
     def update_progress_bar(self, total_downloaded: int, duration: float, progress_counter: tqdm) -> None:
         """Updates the progress bar with download information."""
         try:
             self.add_ts_file(total_downloaded * self.total_segments, total_downloaded, duration)

-            downloaded_file_size_str = self.get_downloaded_size()
+            downloaded_file_size_str = internet_manager.format_file_size(self.now_downloaded_size)
             file_total_size = self.calculate_total_size()

             number_file_downloaded = downloaded_file_size_str.split(' ')[0]
@@ -198,15 +115,13 @@ class M3U8_Ts_Estimator:
             units_file_total_size = file_total_size.split(' ')[1]

             if TQDM_USE_LARGE_BAR:
-                speed_data = self.get_average_speed()
-                #logging.debug(f"Speed data for progress bar: {speed_data}")
+                speed_data = self.speed['download'].split(" ")

                 if len(speed_data) >= 2:
                     average_internet_speed = speed_data[0]
                     average_internet_unit = speed_data[1]
                 else:
-                    logging.warning(f"Invalid speed data format: {speed_data}")
                     average_internet_speed = "N/A"
                     average_internet_unit = ""
@@ -223,7 +138,6 @@ class M3U8_Ts_Estimator:
             )
             progress_counter.set_postfix_str(progress_str)
-            #logging.debug(f"Updated progress bar: {progress_str}")

         except Exception as e:
             logging.error(f"Error updating progress bar: {str(e)}")

View File

@@ -2,9 +2,9 @@
     "DEFAULT": {
         "debug": false,
         "log_file": "app.log",
-        "log_to_file": true,
-        "show_message": true,
-        "clean_console": true,
+        "log_to_file": false,
+        "show_message": false,
+        "clean_console": false,
         "root_path": "Video",
         "movie_folder_name": "Movie",
         "serie_folder_name": "TV",
@@ -17,27 +17,23 @@
             "pass": "adminadmin"
         },
         "add_siteName": false,
-        "disable_searchDomain": false,
+        "disable_searchDomain": true,
         "not_close": false
     },
     "REQUESTS": {
-        "timeout": 30,
+        "timeout": 25,
         "max_retry": 8,
         "proxy_start_min": 0.1,
         "proxy_start_max": 0.5
     },
     "M3U8_DOWNLOAD": {
-        "tqdm_delay": 0.12,
-        "tqdm_use_large_bar": true,
+        "tqdm_delay": 0.1,
         "default_video_workser": 12,
         "default_audio_workser": 12,
-        "download_video": true,
-        "download_audio": true,
         "merge_audio": true,
         "specific_list_audio": [
            "ita"
         ],
-        "download_sub": true,
         "merge_subs": true,
         "specific_list_subtitles": [
             "eng",