From 93a594beef8fbab78a10dc396d36ff5e6901b661 Mon Sep 17 00:00:00 2001
From: Ghost <62809003+Ghost6446@users.noreply.github.com>
Date: Sat, 8 Jun 2024 12:39:39 +0200
Subject: [PATCH 1/5] Fix download speed

---
 Src/Api/Altadefinizione/site.py |  4 ++-
 Src/Lib/M3U8/estimator.py       | 48 +++++++++++++++------------------
 config.json                     |  2 +-
 3 files changed, 26 insertions(+), 28 deletions(-)

diff --git a/Src/Api/Altadefinizione/site.py b/Src/Api/Altadefinizione/site.py
index 1a53609..ca271c0 100644
--- a/Src/Api/Altadefinizione/site.py
+++ b/Src/Api/Altadefinizione/site.py
@@ -15,6 +15,8 @@ from unidecode import unidecode
 from Src.Util.table import TVShowManager
 from Src.Util.console import console
 from Src.Util._jsonConfig import config_manager
+from Src.Util.headers import get_headers
+
 
 # Logic class
 from .Core.Class.SearchType import MediaManager, MediaItem
@@ -44,7 +46,7 @@ def title_search(title_search: str) -> int:
     """
 
     # Send request to search for titles
-    response = requests.get(f"https://{AD_SITE_NAME}.{AD_DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3")
+    response = requests.get(f"https://{AD_SITE_NAME}.{AD_DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3", headers={'user-agent': get_headers()})
     response.raise_for_status()
 
     # Create soup and find table
diff --git a/Src/Lib/M3U8/estimator.py b/Src/Lib/M3U8/estimator.py
index e762a40..5fd41a7 100644
--- a/Src/Lib/M3U8/estimator.py
+++ b/Src/Lib/M3U8/estimator.py
@@ -22,20 +22,13 @@ TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar
 
 class M3U8_Ts_Estimator:
     def __init__(self, total_segments: int):
-        """
-        Initialize the TSFileSizeCalculator object.
-
-        Args:
-            - workers (int): The number of workers using with ThreadPool.
-            - total_segments (int): Len of total segments to download
-        """
         self.ts_file_sizes = []
         self.now_downloaded_size = 0
-        self.average_over = 3
+        self.average_over = 5
         self.list_speeds = deque(maxlen=self.average_over)
-        self.smoothed_speeds = []
         self.total_segments = total_segments
-        self.lock = threading.Lock()
+        self.last_segment_duration = 1
+        self.last_segment_size = 0
 
     def add_ts_file(self, size: int, size_download: int, duration: float):
         """
@@ -50,26 +43,23 @@ class M3U8_Ts_Estimator:
             logging.error("Invalid input values: size=%d, size_download=%d, duration=%f", size, size_download, duration)
             return
 
-        # Calculate speed outside of the lock
+        # Dynamic time calibration
+        self.last_segment_duration = duration
+
+        # Account for segment size variation
+        self.last_segment_size = size_download
+
+        # Speed calculation
         try:
-            speed_mbps = (size_download * 8) / (duration * 1_000_000)
+            speed_mbps = (size_download * 8) / (duration * 1024 * 1024)
+
         except ZeroDivisionError as e:
             logging.error("Division by zero error while calculating speed: %s", e)
             return
 
-        # Only update shared data within the lock
-        with self.lock:
-            self.ts_file_sizes.append(size)
-            self.now_downloaded_size += size_download
-            self.list_speeds.append(speed_mbps)
-
-            # Calculate moving average
-            smoothed_speed = sum(self.list_speeds) / len(self.list_speeds)
-            self.smoothed_speeds.append(smoothed_speed)
-
-            # Update smooth speeds
-            if len(self.smoothed_speeds) > self.average_over:
-                self.smoothed_speeds.pop(0)
+        self.ts_file_sizes.append(size)
+        self.now_downloaded_size += size_download
+        self.list_speeds.append(speed_mbps)
 
     def calculate_total_size(self) -> str:
         """
@@ -103,7 +93,13 @@ class M3U8_Ts_Estimator:
         Returns:
             float: The average speed in megabytes per second (MB/s).
""" - return ((sum(self.smoothed_speeds) / len(self.smoothed_speeds)) / 8 ) * 10 # MB/s + + # Smooth the speeds for better accuracy using the window defined by average_over + smoothed_speed = sum(self.list_speeds) / min(len(self.list_speeds), self.average_over) + predicted_speed = smoothed_speed * (self.last_segment_size / (1024 * 1024)) / self.last_segment_duration + + # Convert to mb/s + return predicted_speed / 8 def get_downloaded_size(self) -> str: """ diff --git a/config.json b/config.json index e815bf8..cedc3a0 100644 --- a/config.json +++ b/config.json @@ -45,6 +45,6 @@ "SITE": { "streamingcommunity": "foo", "animeunity": "to", - "altadefinizione": "food" + "altadefinizione": "vodka" } } From 48f39042aed0e6e63e5a88285fb20e4a1531e93f Mon Sep 17 00:00:00 2001 From: Ghost <62809003+Ghost6446@users.noreply.github.com> Date: Sat, 8 Jun 2024 20:26:54 +0200 Subject: [PATCH 2/5] Fix struck at 100% --- Src/Lib/Hls/segments.py | 133 +++++++++++++++++--------------------- Src/Lib/M3U8/estimator.py | 65 +++++++++---------- 2 files changed, 91 insertions(+), 107 deletions(-) diff --git a/Src/Lib/Hls/segments.py b/Src/Lib/Hls/segments.py index 03b2d7d..2685a87 100644 --- a/Src/Lib/Hls/segments.py +++ b/Src/Lib/Hls/segments.py @@ -7,8 +7,9 @@ import queue import threading import logging import binascii +from queue import PriorityQueue +from urllib.parse import urljoin from concurrent.futures import ThreadPoolExecutor -from urllib.parse import urljoin, urlparse, urlunparse # External libraries @@ -72,10 +73,9 @@ class M3U8_Segments: self.class_url_fixer = M3U8_UrlFix(url) # Sync - self.current_index = 0 # Index of the current segment to be written - self.segment_queue = queue.PriorityQueue() # Priority queue to maintain the order of segments - self.condition = threading.Condition() # Condition variable for thread synchronization - + self.queue = PriorityQueue() + self.stop_event = threading.Event() + def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes: """ Retrieves the encryption key from the M3U8 playlist. @@ -210,83 +210,71 @@ class M3U8_Segments: - index (int): The index of the segment. - progress_bar (tqdm): Progress counter for tracking download progress. 
""" - - # Generate new user agent - headers_segments['user-agent'] = get_headers() - try: + # Generate headers start_time = time.time() + headers_segments['user-agent'] = get_headers() - # Generate proxy + # Make request to get content if len(PROXY_LIST) > 0: - - # Make request proxy = self.get_proxy(index) response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy) - response.raise_for_status() - else: - - # Make request response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT) - response.raise_for_status() - # Calculate duration + # Get response content + response.raise_for_status() + segment_content = response.content + + # Update bar duration = time.time() - start_time - logging.info(f"Make request to get segment: [{index} - {len(self.segments)}] in: {duration}, len data: {len(response.content)}") + response_size = int(response.headers.get('Content-Length', 0)) + self.class_ts_estimator.update_progress_bar(response_size, duration, progress_bar) + + # Decrypt the segment content if decryption is needed + if self.decryption is not None: + segment_content = self.decryption.decrypt(segment_content) - if response.ok: - - # Get the content of the segment - segment_content = response.content - - # Update bar - self.class_ts_estimator.update_progress_bar(int(response.headers.get('Content-Length', 0)), duration, progress_bar) - - # Decrypt the segment content if decryption is needed - if self.decryption is not None: - segment_content = self.decryption.decrypt(segment_content) - - with self.condition: - self.segment_queue.put((index, segment_content)) # Add the segment to the queue - self.condition.notify() # Notify the writer thread that a new segment is available - else: - logging.error(f"Failed to download segment: {ts_url}") + # Add the segment to the queue + self.queue.put((index, segment_content)) + progress_bar.update(1) except (HTTPError, ConnectionError, Timeout, RequestException) as e: + progress_bar.update(1) logging.error(f"Request-related exception while downloading segment: {e}") - except Exception as e: - logging.error(f"An unexpected exception occurred while download segment: {e}") - # Update bar - progress_bar.update(1) + except Exception as e: + progress_bar.update(1) + logging.error(f"An unexpected exception occurred while download segment: {e}") def write_segments_to_file(self): """ Writes downloaded segments to a file in the correct order. 
""" - with open(self.tmp_file_path, 'ab') as f: - while True: - with self.condition: - while self.segment_queue.empty() and self.current_index < len(self.segments): - self.condition.wait() # Wait until a new segment is available or all segments are downloaded + with open(self.tmp_file_path, 'wb') as f: + expected_index = 0 + buffer = {} - if self.segment_queue.empty() and self.current_index >= len(self.segments): - break # Exit loop if all segments have been processed + while not self.stop_event.is_set() or not self.queue.empty(): + try: + index, segment_content = self.queue.get(timeout=1) - if not self.segment_queue.empty(): - # Get the segment from the queue - index, segment_content = self.segment_queue.get() + if index == expected_index: + f.write(segment_content) + f.flush() + expected_index += 1 - # Write the segment to the file - if index == self.current_index: - f.write(segment_content) - self.current_index += 1 - self.segment_queue.task_done() - else: - self.segment_queue.put((index, segment_content)) # Requeue the segment if it is not the next to be written - self.condition.notify() + # Write any buffered segments in order + while expected_index in buffer: + f.write(buffer.pop(expected_index)) + f.flush() + expected_index += 1 + else: + buffer[index] = segment_content + + except queue.Empty: + continue def download_streams(self, add_desc): """ @@ -296,10 +284,11 @@ class M3U8_Segments: - add_desc (str): Additional description for the progress bar. """ if TQDM_USE_LARGE_BAR: - bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}| {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}| {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]" + bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]" else: bar_format=f"{Colors.YELLOW}Proc{Colors.WHITE}: {Colors.RED}{{percentage:.2f}}% {Colors.WHITE}| {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]" - + + # Create progress bar progress_bar = tqdm( total=len(self.segments), unit='s', @@ -307,21 +296,17 @@ class M3U8_Segments: bar_format=bar_format ) + # Start a separate thread to write segments to the file + writer_thread = threading.Thread(target=self.write_segments_to_file) + writer_thread.start() + + # Start all workers with ThreadPoolExecutor(max_workers=TQDM_MAX_WORKER) as executor: - - # Start a separate thread to write segments to the file - writer_thread = threading.Thread(target=self.write_segments_to_file) - writer_thread.start() - - # Start all workers for index, segment_url in enumerate(self.segments): - - # Submit the download task to the executor executor.submit(self.make_requests_stream, segment_url, index, progress_bar) - # Wait for all segments to be downloaded - executor.shutdown() - - with self.condition: - self.condition.notify_all() # Wake up the writer thread if it's waiting - writer_thread.join() # Wait for the writer thread to finish + # Wait for all tasks to complete + executor.shutdown(wait=True) + self.stop_event.set() + writer_thread.join() + progress_bar.close() diff --git a/Src/Lib/M3U8/estimator.py b/Src/Lib/M3U8/estimator.py index 5fd41a7..8e5b8ed 100644 --- 
+++ b/Src/Lib/M3U8/estimator.py
@@ -22,13 +22,20 @@ TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar
 
 class M3U8_Ts_Estimator:
     def __init__(self, total_segments: int):
+        """
+        Initialize the TSFileSizeCalculator object.
+
+        Args:
+            - workers (int): The number of workers using with ThreadPool.
+            - total_segments (int): Len of total segments to download
+        """
         self.ts_file_sizes = []
         self.now_downloaded_size = 0
-        self.average_over = 5
+        self.average_over = 3
         self.list_speeds = deque(maxlen=self.average_over)
+        self.smoothed_speeds = []
         self.total_segments = total_segments
-        self.last_segment_duration = 1
-        self.last_segment_size = 0
+        self.lock = threading.Lock()
 
     def add_ts_file(self, size: int, size_download: int, duration: float):
         """
@@ -43,24 +50,31 @@ class M3U8_Ts_Estimator:
             logging.error("Invalid input values: size=%d, size_download=%d, duration=%f", size, size_download, duration)
             return
 
-        # Dynamic time calibration
-        self.last_segment_duration = duration
-
-        # Account for segment size variation
-        self.last_segment_size = size_download
-
-        # Speed calculation
-        try:
-            speed_mbps = (size_download * 8) / (duration * 1024 * 1024)
-
-        except ZeroDivisionError as e:
-            logging.error("Division by zero error while calculating speed: %s", e)
-            return
+        # Calculate speed outside of the lock
+        speed_mbps = (size_download * 4) / (duration * (1024 * 1024))
 
+        # Add total size bytes
         self.ts_file_sizes.append(size)
         self.now_downloaded_size += size_download
         self.list_speeds.append(speed_mbps)
 
+        # Calculate moving average
+        smoothed_speed = sum(self.list_speeds) / len(self.list_speeds)
+        self.smoothed_speeds.append(smoothed_speed)
+
+        # Update smooth speeds
+        if len(self.smoothed_speeds) > self.average_over:
+            self.smoothed_speeds.pop(0)
+
+    def get_average_speed(self) -> float:
+        """
+        Calculate the average speed from a list of speeds and convert it to megabytes per second (MB/s).
+
+        Returns:
+            float: The average speed in megabytes per second (MB/s).
+        """
+        return (sum(self.smoothed_speeds) / len(self.smoothed_speeds))
+
     def calculate_total_size(self) -> str:
         """
         Calculate the total size of the files.
@@ -86,21 +100,6 @@ class M3U8_Ts_Estimator:
             logging.error("An unexpected error occurred: %s", e)
             return "Error"
 
-    def get_average_speed(self) -> float:
-        """
-        Calculate the average speed from a list of speeds and convert it to megabytes per second (MB/s).
-
-        Returns:
-            float: The average speed in megabytes per second (MB/s).
-        """
-
-        # Smooth the speeds for better accuracy using the window defined by average_over
-        smoothed_speed = sum(self.list_speeds) / min(len(self.list_speeds), self.average_over)
-        predicted_speed = smoothed_speed * (self.last_segment_size / (1024 * 1024)) / self.last_segment_duration
-
-        # Convert to mb/s
-        return predicted_speed / 8
-
     def get_downloaded_size(self) -> str:
         """
         Get the total downloaded size formatted as a human-readable string.
@@ -144,5 +143,5 @@ class M3U8_Ts_Estimator:
         else:
             progress_counter.set_postfix_str(
                 f"{Colors.WHITE}[ {Colors.GREEN}{number_file_downloaded}{Colors.RED} {units_file_downloaded} "
-                f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}MB/s"
-            )
\ No newline at end of file
+                f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}Mbps"
+            )

From 8835eeff2ec275c85bc0439e89740b49c34eb847 Mon Sep 17 00:00:00 2001
From: Ghost <62809003+Ghost6446@users.noreply.github.com>
Date: Sat, 8 Jun 2024 20:54:36 +0200
Subject: [PATCH 3/5] Add start delay

---
 Src/Lib/Hls/segments.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/Src/Lib/Hls/segments.py b/Src/Lib/Hls/segments.py
index 2685a87..f1dee38 100644
--- a/Src/Lib/Hls/segments.py
+++ b/Src/Lib/Hls/segments.py
@@ -44,6 +44,7 @@ TQDM_MAX_WORKER = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers')
 TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
 REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
 PROXY_LIST = config_manager.get_list('REQUESTS', 'proxy')
+START_THREAD_DELAY = 0.05
 
 
 # Variable
@@ -210,6 +211,8 @@ class M3U8_Segments:
             - index (int): The index of the segment.
             - progress_bar (tqdm): Progress counter for tracking download progress.
         """
+        global START_THREAD_DELAY
+
         try:
             # Generate headers
             start_time = time.time()
@@ -283,6 +286,8 @@ class M3U8_Segments:
         Args:
             - add_desc (str): Additional description for the progress bar.
         """
+        global START_THREAD_DELAY
+
         if TQDM_USE_LARGE_BAR:
             bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
         else:
@@ -303,6 +308,7 @@ class M3U8_Segments:
         # Start all workers
         with ThreadPoolExecutor(max_workers=TQDM_MAX_WORKER) as executor:
             for index, segment_url in enumerate(self.segments):
+                time.sleep(START_THREAD_DELAY)
                 executor.submit(self.make_requests_stream, segment_url, index, progress_bar)
 
         # Wait for all tasks to complete

From 748b1c9bfa7c554f29a390fa76c6bd9d8c992cae Mon Sep 17 00:00:00 2001
From: Ghost <62809003+Ghost6446@users.noreply.github.com>
Date: Sun, 9 Jun 2024 11:41:43 +0200
Subject: [PATCH 4/5] Update proxy validation

---
 README.md                  |  2 +-
 Src/Lib/Hls/proxyes.py     | 88 ++++++++++++++++++++++++++++++++++++++
 Src/Lib/Hls/segments.py    | 57 +++++++-----------------
 Test/t_get_server_ip_sc.py | 86 -------------------------------------
 config.json                |  1 +
 5 files changed, 106 insertions(+), 128 deletions(-)
 create mode 100644 Src/Lib/Hls/proxyes.py
 delete mode 100644 Test/t_get_server_ip_sc.py

diff --git a/README.md b/README.md
index 6bb3962..3bd8f76 100644
--- a/README.md
+++ b/README.md
@@ -100,7 +100,7 @@ You can change some behaviors by tweaking the configuration file.
   - **Default Value**: `false`
 
 * **proxy**: The proxy to use for requests. (Note: This parameter works only with HTTP and HTTPS protocols.)
-  - **Example Value**: `[{'protocol': 'http', 'ip': '123.45.67.89', 'port': '8080', 'username': 'your_username', 'password': 'your_password'}, {'protocol': 'https', 'ip': '123.45.67.89', 'port': '8080', 'username': 'your_username', 'password': 'your_password'}]`
+  - **Example Value**: `["http://user:pass@38.154.227.167:5868"]`
 
 
diff --git a/Src/Lib/Hls/proxyes.py b/Src/Lib/Hls/proxyes.py
new file mode 100644
index 0000000..8a0fda3
--- /dev/null
+++ b/Src/Lib/Hls/proxyes.py
@@ -0,0 +1,88 @@
+# 09.06.24
+
+import time
+import logging
+from concurrent.futures import ThreadPoolExecutor
+
+
+# External libraries
+import requests
+
+
+# Internal utilities
+from Src.Util._jsonConfig import config_manager
+
+
+class ProxyManager:
+    def __init__(self, proxy_list=None, url=None):
+        """
+        Initialize ProxyManager with a list of proxies and timeout.
+
+        Args:
+            - proxy_list: List of proxy strings
+            - timeout: Timeout for proxy requests
+        """
+        self.proxy_list = proxy_list or []
+        self.verified_proxies = []
+        self.failed_proxies = {}
+        self.timeout = config_manager.get_float('REQUESTS', 'timeout')
+        self.url = url
+
+    def _check_proxy(self, proxy):
+        """
+        Check if a single proxy is working by making a request to Google.
+
+        Args:
+            - proxy: Proxy string to be checked
+
+        Returns:
+            - Proxy string if working, None otherwise
+        """
+        protocol = proxy.split(":")[0].lower()
+
+        try:
+            response = requests.get(self.url, proxies={protocol: proxy}, timeout=self.timeout)
+
+            if response.status_code == 200:
+                logging.info(f"Proxy {proxy} is working.")
+                return proxy
+
+        except requests.RequestException as e:
+            logging.error(f"Proxy {proxy} failed: {e}")
+            self.failed_proxies[proxy] = time.time()
+            return None
+
+    def verify_proxies(self):
+        """
+        Verify all proxies in the list and store the working ones.
+        """
+        logging.info("Starting proxy verification...")
+        with ThreadPoolExecutor(max_workers=10) as executor:
+            self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
+        self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
+        logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
+
+    def get_verified_proxies(self):
+        """
+        Get validated proxies.
+ """ + validate_proxy = [] + + for proxy in self.verified_proxies: + protocol = proxy.split(":")[0].lower() + validate_proxy.append({protocol: proxy}) + + return validate_proxy + + +def main_test_proxy(url_test): + + # Get list of proxy from config.json + proxy_list = config_manager.get_list('REQUESTS', 'proxy') + + # Verify proxy + manager = ProxyManager(proxy_list, url_test) + manager.verify_proxies() + + # Return valid proxy + return manager.get_verified_proxies() diff --git a/Src/Lib/Hls/segments.py b/Src/Lib/Hls/segments.py index f1dee38..421ecea 100644 --- a/Src/Lib/Hls/segments.py +++ b/Src/Lib/Hls/segments.py @@ -32,6 +32,7 @@ from ..M3U8 import ( M3U8_Parser, M3U8_UrlFix ) +from .proxyes import main_test_proxy # Warning @@ -41,10 +42,10 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # Config TQDM_MAX_WORKER = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers') +TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay') TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar') REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout') -PROXY_LIST = config_manager.get_list('REQUESTS', 'proxy') -START_THREAD_DELAY = 0.05 +THERE_IS_PROXY_LIST = len(config_manager.get_list('REQUESTS', 'proxy')) > 0 # Variable @@ -152,6 +153,15 @@ class M3U8_Segments: self.class_ts_estimator.total_segments = len(self.segments) logging.info(f"Segmnets to donwload: [{len(self.segments)}]") + # Proxy + if THERE_IS_PROXY_LIST: + console.log("[red]Validate proxy.") + self.valid_proxy = main_test_proxy(self.segments[0]) + console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}") + + if len(self.valid_proxy) == 0: + sys.exit(0) + def get_info(self) -> None: """ Makes a request to the index M3U8 file to get information about segments. @@ -170,38 +180,6 @@ class M3U8_Segments: # Parse the text from the M3U8 index file self.parse_data(response.text) - def get_proxy(self, index): - """ - Returns the proxy configuration for the given index. - - Args: - - index (int): The index to select the proxy from the PROXY_LIST. - - Returns: - - dict: A dictionary containing the proxy scheme and proxy URL. - """ - try: - - # Select the proxy from the list using the index - new_proxy = PROXY_LIST[index % len(PROXY_LIST)] - proxy_scheme = new_proxy["protocol"] - - # Construct the proxy URL based on the presence of user and pass keys - if "user" in new_proxy and "pass" in new_proxy: - proxy_url = f"{proxy_scheme}://{new_proxy['user']}:{new_proxy['pass']}@{new_proxy['ip']}:{new_proxy['port']}" - elif "user" in new_proxy: - proxy_url = f"{proxy_scheme}://{new_proxy['user']}@{new_proxy['ip']}:{new_proxy['port']}" - else: - proxy_url = f"{proxy_scheme}://{new_proxy['ip']}:{new_proxy['port']}" - - logging.info(f"Proxy URL generated: {proxy_url}") - return {proxy_scheme: proxy_url} - - except KeyError as e: - logging.error(f"KeyError: Missing required key {e} in proxy configuration.") - except Exception as e: - logging.error(f"An unexpected error occurred while generating proxy URL: {e}") - def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm) -> None: """ Downloads a TS segment and adds it to the segment queue. @@ -211,8 +189,6 @@ class M3U8_Segments: - index (int): The index of the segment. - progress_bar (tqdm): Progress counter for tracking download progress. 
""" - global START_THREAD_DELAY - try: # Generate headers @@ -220,8 +196,9 @@ class M3U8_Segments: headers_segments['user-agent'] = get_headers() # Make request to get content - if len(PROXY_LIST) > 0: - proxy = self.get_proxy(index) + if THERE_IS_PROXY_LIST: + proxy = self.valid_proxy[index % len(self.valid_proxy)] + logging.info(f"Use proxy: {proxy}") response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy) else: response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT) @@ -286,8 +263,6 @@ class M3U8_Segments: Args: - add_desc (str): Additional description for the progress bar. """ - global START_THREAD_DELAY - if TQDM_USE_LARGE_BAR: bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]" else: @@ -308,7 +283,7 @@ class M3U8_Segments: # Start all workers with ThreadPoolExecutor(max_workers=TQDM_MAX_WORKER) as executor: for index, segment_url in enumerate(self.segments): - time.sleep(START_THREAD_DELAY) + time.sleep(TQDM_DELAY_WORKER) executor.submit(self.make_requests_stream, segment_url, index, progress_bar) # Wait for all tasks to complete diff --git a/Test/t_get_server_ip_sc.py b/Test/t_get_server_ip_sc.py deleted file mode 100644 index d89ebac..0000000 --- a/Test/t_get_server_ip_sc.py +++ /dev/null @@ -1,86 +0,0 @@ -# 13.05.24 - -import socket -import logging -import urllib3 -from urllib.parse import urlparse, urlunparse - - -import warnings -warnings.filterwarnings("ignore", category=urllib3.exceptions.InsecureRequestWarning) - - -# Variable -url_test = "https://sc-b1-18.scws-content.net/hls/100/b/d3/bd3a430d-0a13-4bec-8fcc-ea41af183555/audio/ita/0010-0100.ts?token=CiEPTIyvEoTkGk3szgDu9g&expires=1722801022" - - -def get_ip_from_url(url): - """ - Extracts the IP address from a given URL. - - Args: - url (str): The URL from which to extract the IP address. - - Returns: - str or None: The extracted IP address if successful, otherwise None. - """ - try: - parsed_url = urlparse(url) - if not parsed_url.hostname: - logging.error(f"Invalid URL: {url}") - return None - - ip_address = socket.gethostbyname(parsed_url.hostname) - return ip_address - - except Exception as e: - logging.error(f"Error: {e}") - return None - -def replace_random_number(url, random_number): - """ - Replaces a random number in the URL. - - Args: - url (str): The URL in which to replace the random number. - random_number (int): The random number to replace in the URL. - - Returns: - str: The modified URL with the random number replaced. - """ - parsed_url = urlparse(url) - parts = parsed_url.netloc.split('.') - prefix = None - - for i, part in enumerate(parts): - if '-' in part and part.startswith("sc-"): - prefix = part.split('-')[0] + '-' + part.split('-')[1] + '-' - new_part = prefix + f"{random_number:02d}" - parts[i] = new_part - break - - new_netloc = '.'.join(parts) - return urlunparse((parsed_url.scheme, new_netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment)) - -def main(): - """ - Main function to test the URL manipulation. 
- """ - valid_ip = [] - - for i in range(1, 36): - try: - ip = get_ip_from_url(replace_random_number(url_test, i)) - - if ip: - valid_ip.append(ip) - - except Exception as e: - logging.error(f"Error: {e}") - pass - - print(f"Valid IP addresses: {sorted(valid_ip, reverse=True)}") - - -if __name__ == '__main__': - main() diff --git a/config.json b/config.json index cedc3a0..3ac1b38 100644 --- a/config.json +++ b/config.json @@ -18,6 +18,7 @@ }, "M3U8_DOWNLOAD": { "tdqm_workers": 4, + "tqdm_delay": 0.01, "tqdm_use_large_bar": true, "download_video": true, "download_audio": true, From 74bf633cfcf1c18bd71dab829a57e496bcfcd37e Mon Sep 17 00:00:00 2001 From: Ghost <62809003+Ghost6446@users.noreply.github.com> Date: Sun, 9 Jun 2024 14:16:48 +0200 Subject: [PATCH 5/5] Update config ... --- .../player.py => Player/vixcloud.py} | 17 ----------------- Src/Api/Animeunity/anime.py | 2 +- .../player.py => Player/vixcloud.py} | 17 ----------------- Src/Api/Streamingcommunity/film.py | 2 +- Src/Api/Streamingcommunity/series.py | 2 +- Src/Lib/FFmpeg/command.py | 2 +- Src/Lib/M3U8/__init__.py | 13 ++++++++----- Src/Lib/M3U8/helper/__init__.py | 6 ++++++ .../M3U8/{decryption.py => helper/decryptor.py} | 0 Src/Lib/M3U8/{ => helper}/estimator.py | 0 Src/Lib/M3U8/{ => helper}/parser.py | 2 +- Src/Lib/M3U8/{ => helper}/url_fixer.py | 0 Src/Lib/M3U8/{lib_parser => parser}/__init__.py | 0 Src/Lib/M3U8/{lib_parser => parser}/_util.py | 0 Src/Lib/M3U8/{lib_parser => parser}/model.py | 3 +-- Src/Lib/M3U8/{lib_parser => parser}/parser.py | 2 +- Src/Lib/M3U8/{lib_parser => parser}/protocol.py | 0 Test/t_m3u8_parser.py | 4 +--- config.json | 2 +- requirements.txt | 3 +-- 20 files changed, 24 insertions(+), 53 deletions(-) rename Src/Api/Animeunity/Core/{Vix_player/player.py => Player/vixcloud.py} (92%) rename Src/Api/Streamingcommunity/Core/{Vix_player/player.py => Player/vixcloud.py} (93%) create mode 100644 Src/Lib/M3U8/helper/__init__.py rename Src/Lib/M3U8/{decryption.py => helper/decryptor.py} (100%) rename Src/Lib/M3U8/{ => helper}/estimator.py (100%) rename Src/Lib/M3U8/{ => helper}/parser.py (99%) rename Src/Lib/M3U8/{ => helper}/url_fixer.py (100%) rename Src/Lib/M3U8/{lib_parser => parser}/__init__.py (100%) rename Src/Lib/M3U8/{lib_parser => parser}/_util.py (100%) rename Src/Lib/M3U8/{lib_parser => parser}/model.py (99%) rename Src/Lib/M3U8/{lib_parser => parser}/parser.py (99%) rename Src/Lib/M3U8/{lib_parser => parser}/protocol.py (100%) diff --git a/Src/Api/Animeunity/Core/Vix_player/player.py b/Src/Api/Animeunity/Core/Player/vixcloud.py similarity index 92% rename from Src/Api/Animeunity/Core/Vix_player/player.py rename to Src/Api/Animeunity/Core/Player/vixcloud.py index c977888..dd6192c 100644 --- a/Src/Api/Animeunity/Core/Vix_player/player.py +++ b/Src/Api/Animeunity/Core/Player/vixcloud.py @@ -50,24 +50,7 @@ class VideoSource: self.series_name = series_name self.obj_title_manager: TitleManager = TitleManager() self.obj_episode_manager: EpisodeManager = EpisodeManager() - - def get_preview(self) -> None: - """ - Retrieves preview information of a media-id - """ - - try: - response = requests.post(f"https://{self.base_name}.{self.domain}/api/titles/preview/{self.media_id}", headers = self.headers) - response.raise_for_status() - - # Collect all info about preview - self.obj_preview = PreviewManager(response.json()) - - except Exception as e: - logging.error(f"Error collecting preview info: {e}") - raise - def get_count_episodes(self): """ Fetches the total count of episodes available for the anime. 
diff --git a/Src/Api/Animeunity/anime.py b/Src/Api/Animeunity/anime.py
index 26a6357..35ac2d6 100644
--- a/Src/Api/Animeunity/anime.py
+++ b/Src/Api/Animeunity/anime.py
@@ -12,7 +12,7 @@ from Src.Util.message import start_message
 
 
 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource
 from .Core.Util import manage_selection
 
 
diff --git a/Src/Api/Streamingcommunity/Core/Vix_player/player.py b/Src/Api/Streamingcommunity/Core/Player/vixcloud.py
similarity index 93%
rename from Src/Api/Streamingcommunity/Core/Vix_player/player.py
rename to Src/Api/Streamingcommunity/Core/Player/vixcloud.py
index cb26be4..1cbba36 100644
--- a/Src/Api/Streamingcommunity/Core/Vix_player/player.py
+++ b/Src/Api/Streamingcommunity/Core/Player/vixcloud.py
@@ -53,23 +53,6 @@ class VideoSource:
         self.obj_title_manager: TitleManager = TitleManager()
         self.obj_episode_manager: EpisodeManager = EpisodeManager()
 
-    def get_preview(self) -> None:
-        """
-        Retrieves preview information of a media-id
-        """
-
-        try:
-
-            response = requests.post(f"https://{self.base_name}.{self.domain}/api/titles/preview/{self.media_id}", headers=self.headers)
-            response.raise_for_status()
-
-            # Collect all info about preview
-            self.obj_preview = PreviewManager(response.json())
-
-        except Exception as e:
-            logging.error(f"Error collecting preview info: {e}")
-            raise
-
     def collect_info_seasons(self) -> None:
         """
         Collect information about seasons.
diff --git a/Src/Api/Streamingcommunity/film.py b/Src/Api/Streamingcommunity/film.py
index 2b9d7bf..b71216f 100644
--- a/Src/Api/Streamingcommunity/film.py
+++ b/Src/Api/Streamingcommunity/film.py
@@ -13,7 +13,7 @@ from Src.Util.message import start_message
 
 
 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource
 
 
 # Config
diff --git a/Src/Api/Streamingcommunity/series.py b/Src/Api/Streamingcommunity/series.py
index 9b2ecc0..a30e847 100644
--- a/Src/Api/Streamingcommunity/series.py
+++ b/Src/Api/Streamingcommunity/series.py
@@ -14,7 +14,7 @@ from Src.Lib.Hls.downloader import Downloader
 
 
 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource
 from .Core.Util import manage_selection, map_episode_title
 
 
diff --git a/Src/Lib/FFmpeg/command.py b/Src/Lib/FFmpeg/command.py
index f3b85c1..aad5b46 100644
--- a/Src/Lib/FFmpeg/command.py
+++ b/Src/Lib/FFmpeg/command.py
@@ -21,7 +21,7 @@ from Src.Util.os import check_file_existence, suppress_output
 from Src.Util.console import console
 from .util import has_audio_stream, need_to_force_to_ts, check_ffmpeg_input, check_duration_v_a
 from .capture import capture_ffmpeg_real_time
-from ..M3U8.parser import M3U8_Codec
+from ..M3U8 import M3U8_Codec
 
 
 # Config
diff --git a/Src/Lib/M3U8/__init__.py b/Src/Lib/M3U8/__init__.py
index 08a541b..9037df3 100644
--- a/Src/Lib/M3U8/__init__.py
+++ b/Src/Lib/M3U8/__init__.py
@@ -1,6 +1,9 @@
-# 02.04.24
+# 09.06.24
 
-from .decryption import M3U8_Decryption
-from .estimator import M3U8_Ts_Estimator
-from .parser import M3U8_Parser, M3U8_Codec
-from .url_fixer import M3U8_UrlFix
\ No newline at end of file
+from .helper import (
+    M3U8_Decryption,
+    M3U8_Ts_Estimator,
+    M3U8_Parser,
+    M3U8_Codec,
+    M3U8_UrlFix
+)
\ No newline at end of file
diff --git a/Src/Lib/M3U8/helper/__init__.py b/Src/Lib/M3U8/helper/__init__.py
new file mode 100644
index 0000000..9888972
--- /dev/null
+++ b/Src/Lib/M3U8/helper/__init__.py
@@ -0,0 +1,6 @@
+# 02.04.24
+
+from .decryptor import M3U8_Decryption
+from .estimator import M3U8_Ts_Estimator
+from .parser import M3U8_Parser, M3U8_Codec
+from .url_fixer import M3U8_UrlFix
\ No newline at end of file
diff --git a/Src/Lib/M3U8/decryption.py b/Src/Lib/M3U8/helper/decryptor.py
similarity index 100%
rename from Src/Lib/M3U8/decryption.py
rename to Src/Lib/M3U8/helper/decryptor.py
diff --git a/Src/Lib/M3U8/estimator.py b/Src/Lib/M3U8/helper/estimator.py
similarity index 100%
rename from Src/Lib/M3U8/estimator.py
rename to Src/Lib/M3U8/helper/estimator.py
diff --git a/Src/Lib/M3U8/parser.py b/Src/Lib/M3U8/helper/parser.py
similarity index 99%
rename from Src/Lib/M3U8/parser.py
rename to Src/Lib/M3U8/helper/parser.py
index bf364eb..0554164 100644
--- a/Src/Lib/M3U8/parser.py
+++ b/Src/Lib/M3U8/helper/parser.py
@@ -4,7 +4,7 @@ import logging
 
 
 # Internal utilities
-from .lib_parser import load
+from ..parser import load
 
 
 # External libraries
diff --git a/Src/Lib/M3U8/url_fixer.py b/Src/Lib/M3U8/helper/url_fixer.py
similarity index 100%
rename from Src/Lib/M3U8/url_fixer.py
rename to Src/Lib/M3U8/helper/url_fixer.py
diff --git a/Src/Lib/M3U8/lib_parser/__init__.py b/Src/Lib/M3U8/parser/__init__.py
similarity index 100%
rename from Src/Lib/M3U8/lib_parser/__init__.py
rename to Src/Lib/M3U8/parser/__init__.py
diff --git a/Src/Lib/M3U8/lib_parser/_util.py b/Src/Lib/M3U8/parser/_util.py
similarity index 100%
rename from Src/Lib/M3U8/lib_parser/_util.py
rename to Src/Lib/M3U8/parser/_util.py
diff --git a/Src/Lib/M3U8/lib_parser/model.py b/Src/Lib/M3U8/parser/model.py
similarity index 99%
rename from Src/Lib/M3U8/lib_parser/model.py
rename to Src/Lib/M3U8/parser/model.py
index c1a89ef..035e636 100644
--- a/Src/Lib/M3U8/lib_parser/model.py
+++ b/Src/Lib/M3U8/parser/model.py
@@ -5,7 +5,7 @@ from collections import namedtuple
 
 
 # Internal utilities
-from ..lib_parser import parser
+from ..parser import parser
 
 
 # Variable
@@ -13,7 +13,6 @@ StreamInfo = namedtuple('StreamInfo', ['bandwidth', 'program_id', 'resolution',
 Media = namedtuple('Media', ['uri', 'type', 'group_id', 'language', 'name','default', 'autoselect', 'forced', 'characteristics'])
 
 
-
 class M3U8:
     """
     Represents a single M3U8 playlist. Should be instantiated with the content as string.
diff --git a/Src/Lib/M3U8/lib_parser/parser.py b/Src/Lib/M3U8/parser/parser.py
similarity index 99%
rename from Src/Lib/M3U8/lib_parser/parser.py
rename to Src/Lib/M3U8/parser/parser.py
index dd49f96..d7c666d 100644
--- a/Src/Lib/M3U8/lib_parser/parser.py
+++ b/Src/Lib/M3U8/parser/parser.py
@@ -6,7 +6,7 @@ import datetime
 
 
 # Internal utilities
-from ..lib_parser import protocol
+from ..parser import protocol
 from ._util import (
     remove_quotes,
     remove_quotes_parser,
diff --git a/Src/Lib/M3U8/lib_parser/protocol.py b/Src/Lib/M3U8/parser/protocol.py
similarity index 100%
rename from Src/Lib/M3U8/lib_parser/protocol.py
rename to Src/Lib/M3U8/parser/protocol.py
diff --git a/Test/t_m3u8_parser.py b/Test/t_m3u8_parser.py
index c6468e5..e57010e 100644
--- a/Test/t_m3u8_parser.py
+++ b/Test/t_m3u8_parser.py
@@ -13,13 +13,11 @@ def read_file(file_path):
     return m3u8_content
 
 
-
 # Import
-from Src.Lib.M3U8.lib_parser import M3U8
+from Src.Lib.M3U8.parser import M3U8
 from Src.Lib.M3U8 import M3U8_Parser
 
 
-
 # Test data
 obj_m3u8_parser = M3U8_Parser()
 base_path_file = os.path.join('Test', 'data', 'm3u8')
diff --git a/config.json b/config.json
index 3ac1b38..4399826 100644
--- a/config.json
+++ b/config.json
@@ -17,7 +17,7 @@
         "proxy": []
     },
     "M3U8_DOWNLOAD": {
-        "tdqm_workers": 4,
+        "tdqm_workers": 3,
         "tqdm_delay": 0.01,
         "tqdm_use_large_bar": true,
         "download_video": true,
diff --git a/requirements.txt b/requirements.txt
index d9a07e1..6001546 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,6 @@
 requests
 bs4
-certifi
-tqdm
 rich
+tqdm
 unidecode
 fake-useragent
\ No newline at end of file
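Note on the proxy format introduced in PATCH 4: entries under `REQUESTS.proxy` in `config.json` are now plain proxy URLs (as documented in the README change above), and `ProxyManager._check_proxy` builds the `requests` proxies mapping from the URL scheme. Below is a minimal sketch of that lookup, using the placeholder proxy from the README example and a hypothetical test URL rather than real values:

```python
import requests

# Placeholder values; any entry from config.json's REQUESTS.proxy list would be used the same way
proxy = "http://user:pass@38.154.227.167:5868"
url_test = "https://example.com"

# The scheme prefix ("http" or "https") selects the key of the proxies dict,
# mirroring proxy.split(":")[0].lower() in ProxyManager._check_proxy
protocol = proxy.split(":")[0].lower()

try:
    response = requests.get(url_test, proxies={protocol: proxy}, timeout=10)
    if response.status_code == 200:
        print(f"Proxy {proxy} is working.")
except requests.RequestException as exc:
    print(f"Proxy {proxy} failed: {exc}")
```

The same `{protocol: proxy}` structure is what `get_verified_proxies` hands back to `M3U8_Segments`, which then round-robins over the validated entries with `self.valid_proxy[index % len(self.valid_proxy)]`.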