Mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-07 20:15:24 +00:00)

Commit 79c05203b2: Merge branch 'Lovi-0:main' into main
@@ -100,7 +100,7 @@ You can change some behaviors by tweaking the configuration file.
   - **Default Value**: `false`

 * **proxy**: The proxy to use for requests. (Note: This parameter works only with HTTP and HTTPS protocols.)
-  - **Example Value**: `[{'protocol': 'http', 'ip': '123.45.67.89', 'port': '8080', 'username': 'your_username', 'password': 'your_password'}, {'protocol': 'https', 'ip': '123.45.67.89', 'port': '8080', 'username': 'your_username', 'password': 'your_password'}]`
+  - **Example Value**: `["http://user:pass@38.154.227.167:5868"]`

 </details>
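The new proxy format is a plain URL string per entry, which maps directly onto the `proxies` mapping that `requests` expects. A minimal sketch, with placeholder credentials and a placeholder target URL:

```python
import requests

# Hypothetical entry in the new single-string format.
proxy = "http://user:pass@38.154.227.167:5868"

# The scheme before the first colon selects the proxies-dict key,
# mirroring how the new proxyes.py module derives it.
protocol = proxy.split(":")[0].lower()

response = requests.get("https://example.com", proxies={protocol: proxy}, timeout=10)
print(response.status_code)
```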
@@ -15,6 +15,8 @@ from unidecode import unidecode
 from Src.Util.table import TVShowManager
 from Src.Util.console import console
 from Src.Util._jsonConfig import config_manager
+from Src.Util.headers import get_headers
+

 # Logic class
 from .Core.Class.SearchType import MediaManager, MediaItem
@@ -44,7 +46,7 @@ def title_search(title_search: str) -> int:
     """

     # Send request to search for titles
-    response = requests.get(f"https://{AD_SITE_NAME}.{AD_DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3")
+    response = requests.get(f"https://{AD_SITE_NAME}.{AD_DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3", headers={'user-agent': get_headers()})
    response.raise_for_status()

     # Create soup and find table
@@ -51,23 +51,6 @@ class VideoSource:
         self.obj_title_manager: TitleManager = TitleManager()
         self.obj_episode_manager: EpisodeManager = EpisodeManager()

-    def get_preview(self) -> None:
-        """
-        Retrieves preview information of a media-id
-        """
-
-        try:
-
-            response = requests.post(f"https://{self.base_name}.{self.domain}/api/titles/preview/{self.media_id}", headers = self.headers)
-            response.raise_for_status()
-
-            # Collect all info about preview
-            self.obj_preview = PreviewManager(response.json())
-
-        except Exception as e:
-            logging.error(f"Error collecting preview info: {e}")
-            raise
-
     def get_count_episodes(self):
         """
         Fetches the total count of episodes available for the anime.
@@ -12,7 +12,7 @@ from Src.Util.message import start_message


 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource
 from .Core.Util import manage_selection

@@ -53,23 +53,6 @@ class VideoSource:
         self.obj_title_manager: TitleManager = TitleManager()
         self.obj_episode_manager: EpisodeManager = EpisodeManager()

-    def get_preview(self) -> None:
-        """
-        Retrieves preview information of a media-id
-        """
-
-        try:
-
-            response = requests.post(f"https://{self.base_name}.{self.domain}/api/titles/preview/{self.media_id}", headers=self.headers)
-            response.raise_for_status()
-
-            # Collect all info about preview
-            self.obj_preview = PreviewManager(response.json())
-
-        except Exception as e:
-            logging.error(f"Error collecting preview info: {e}")
-            raise
-
     def collect_info_seasons(self) -> None:
         """
         Collect information about seasons.
@@ -13,7 +13,7 @@ from Src.Util.message import start_message


 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource


 # Config
@@ -14,7 +14,7 @@ from Src.Lib.Hls.downloader import Downloader


 # Logic class
-from .Core.Vix_player.player import VideoSource
+from .Core.Player.vixcloud import VideoSource
 from .Core.Util import manage_selection, map_episode_title

@@ -21,7 +21,7 @@ from Src.Util.os import check_file_existence, suppress_output
 from Src.Util.console import console
 from .util import has_audio_stream, need_to_force_to_ts, check_ffmpeg_input, check_duration_v_a
 from .capture import capture_ffmpeg_real_time
-from ..M3U8.parser import M3U8_Codec
+from ..M3U8 import M3U8_Codec


 # Config
Src/Lib/Hls/proxyes.py (new file, 88 lines)
@@ -0,0 +1,88 @@
+# 09.06.24
+
+import time
+import logging
+from concurrent.futures import ThreadPoolExecutor
+
+
+# External libraries
+import requests
+
+
+# Internal utilities
+from Src.Util._jsonConfig import config_manager
+
+
+class ProxyManager:
+    def __init__(self, proxy_list=None, url=None):
+        """
+        Initialize ProxyManager with a list of proxies and a test URL.
+
+        Args:
+        - proxy_list: List of proxy strings
+        - url: URL used to test each proxy (the timeout is read from config)
+        """
+        self.proxy_list = proxy_list or []
+        self.verified_proxies = []
+        self.failed_proxies = {}
+        self.timeout = config_manager.get_float('REQUESTS', 'timeout')
+        self.url = url
+
+    def _check_proxy(self, proxy):
+        """
+        Check if a single proxy is working by making a request to the configured URL.
+
+        Args:
+        - proxy: Proxy string to be checked
+
+        Returns:
+        - Proxy string if working, None otherwise
+        """
+        protocol = proxy.split(":")[0].lower()
+
+        try:
+            response = requests.get(self.url, proxies={protocol: proxy}, timeout=self.timeout)
+
+            if response.status_code == 200:
+                logging.info(f"Proxy {proxy} is working.")
+                return proxy
+
+        except requests.RequestException as e:
+            logging.error(f"Proxy {proxy} failed: {e}")
+            self.failed_proxies[proxy] = time.time()
+            return None
+
+    def verify_proxies(self):
+        """
+        Verify all proxies in the list and store the working ones.
+        """
+        logging.info("Starting proxy verification...")
+        with ThreadPoolExecutor(max_workers=10) as executor:
+            self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
+        self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
+        logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
+
+    def get_verified_proxies(self):
+        """
+        Get the validated proxies.
+        """
+        validate_proxy = []
+
+        for proxy in self.verified_proxies:
+            protocol = proxy.split(":")[0].lower()
+            validate_proxy.append({protocol: proxy})
+
+        return validate_proxy
+
+
+def main_test_proxy(url_test):
+
+    # Get list of proxy from config.json
+    proxy_list = config_manager.get_list('REQUESTS', 'proxy')
+
+    # Verify proxy
+    manager = ProxyManager(proxy_list, url_test)
+    manager.verify_proxies()
+
+    # Return valid proxy
+    return manager.get_verified_proxies()
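A hedged usage sketch for the new module (the proxy entry and test URL are placeholders; `ProxyManager` also expects a `REQUESTS.timeout` key in config.json, since `__init__` reads it via `config_manager`):

```python
from Src.Lib.Hls.proxyes import ProxyManager

# Placeholder proxy list; real entries come from config.json ('REQUESTS', 'proxy').
proxy_list = ["http://user:pass@38.154.227.167:5868"]

manager = ProxyManager(proxy_list, url="https://example.com")
manager.verify_proxies()

# Each verified entry is returned as a {protocol: proxy_url} dict, ready for requests.
for proxy in manager.get_verified_proxies():
    print(proxy)
```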
@@ -7,8 +7,9 @@ import queue
 import threading
 import logging
 import binascii
+from queue import PriorityQueue
+from urllib.parse import urljoin
 from concurrent.futures import ThreadPoolExecutor
-from urllib.parse import urljoin, urlparse, urlunparse


 # External libraries
@@ -31,6 +32,7 @@ from ..M3U8 import (
     M3U8_Parser,
     M3U8_UrlFix
 )
+from .proxyes import main_test_proxy


 # Warning
@@ -40,9 +42,10 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

 # Config
 TQDM_MAX_WORKER = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers')
+TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
 TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
 REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
-PROXY_LIST = config_manager.get_list('REQUESTS', 'proxy')
+THERE_IS_PROXY_LIST = len(config_manager.get_list('REQUESTS', 'proxy')) > 0


 # Variable
@@ -72,9 +75,8 @@ class M3U8_Segments:
         self.class_url_fixer = M3U8_UrlFix(url)

         # Sync
-        self.current_index = 0  # Index of the current segment to be written
-        self.segment_queue = queue.PriorityQueue()  # Priority queue to maintain the order of segments
-        self.condition = threading.Condition()  # Condition variable for thread synchronization
+        self.queue = PriorityQueue()
+        self.stop_event = threading.Event()

     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
         """
@@ -151,6 +153,15 @@ class M3U8_Segments:
         self.class_ts_estimator.total_segments = len(self.segments)
         logging.info(f"Segmnets to donwload: [{len(self.segments)}]")

+        # Proxy
+        if THERE_IS_PROXY_LIST:
+            console.log("[red]Validate proxy.")
+            self.valid_proxy = main_test_proxy(self.segments[0])
+            console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
+
+            if len(self.valid_proxy) == 0:
+                sys.exit(0)
+
     def get_info(self) -> None:
         """
         Makes a request to the index M3U8 file to get information about segments.
@@ -169,38 +180,6 @@ class M3U8_Segments:
         # Parse the text from the M3U8 index file
         self.parse_data(response.text)

-    def get_proxy(self, index):
-        """
-        Returns the proxy configuration for the given index.
-
-        Args:
-        - index (int): The index to select the proxy from the PROXY_LIST.
-
-        Returns:
-        - dict: A dictionary containing the proxy scheme and proxy URL.
-        """
-        try:
-
-            # Select the proxy from the list using the index
-            new_proxy = PROXY_LIST[index % len(PROXY_LIST)]
-            proxy_scheme = new_proxy["protocol"]
-
-            # Construct the proxy URL based on the presence of user and pass keys
-            if "user" in new_proxy and "pass" in new_proxy:
-                proxy_url = f"{proxy_scheme}://{new_proxy['user']}:{new_proxy['pass']}@{new_proxy['ip']}:{new_proxy['port']}"
-            elif "user" in new_proxy:
-                proxy_url = f"{proxy_scheme}://{new_proxy['user']}@{new_proxy['ip']}:{new_proxy['port']}"
-            else:
-                proxy_url = f"{proxy_scheme}://{new_proxy['ip']}:{new_proxy['port']}"
-
-            logging.info(f"Proxy URL generated: {proxy_url}")
-            return {proxy_scheme: proxy_url}
-
-        except KeyError as e:
-            logging.error(f"KeyError: Missing required key {e} in proxy configuration.")
-        except Exception as e:
-            logging.error(f"An unexpected error occurred while generating proxy URL: {e}")
-
     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm) -> None:
         """
         Downloads a TS segment and adds it to the segment queue.
@@ -210,83 +189,72 @@ class M3U8_Segments:
         - index (int): The index of the segment.
         - progress_bar (tqdm): Progress counter for tracking download progress.
         """
-
-        # Generate new user agent
-        headers_segments['user-agent'] = get_headers()
-
         try:
-            start_time = time.time()
-
-            # Generate proxy
-            if len(PROXY_LIST) > 0:
-
-                # Make request
-                proxy = self.get_proxy(index)
-                response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy)
-                response.raise_for_status()
-
-            else:
-
-                # Make request
-                response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT)
-                response.raise_for_status()
-
-            # Calculate duration
-            duration = time.time() - start_time
-            logging.info(f"Make request to get segment: [{index} - {len(self.segments)}] in: {duration}, len data: {len(response.content)}")
-
-            if response.ok:
-
-                # Get the content of the segment
-                segment_content = response.content
-
-                # Update bar
-                self.class_ts_estimator.update_progress_bar(int(response.headers.get('Content-Length', 0)), duration, progress_bar)
-
-                # Decrypt the segment content if decryption is needed
-                if self.decryption is not None:
-                    segment_content = self.decryption.decrypt(segment_content)
-
-                with self.condition:
-                    self.segment_queue.put((index, segment_content))  # Add the segment to the queue
-                    self.condition.notify()  # Notify the writer thread that a new segment is available
-            else:
-                logging.error(f"Failed to download segment: {ts_url}")
+
+            # Generate headers
+            start_time = time.time()
+            headers_segments['user-agent'] = get_headers()
+
+            # Make request to get content
+            if THERE_IS_PROXY_LIST:
+                proxy = self.valid_proxy[index % len(self.valid_proxy)]
+                logging.info(f"Use proxy: {proxy}")
+                response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy)
+            else:
+                response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT)
+
+            # Get response content
+            response.raise_for_status()
+            segment_content = response.content
+
+            # Update bar
+            duration = time.time() - start_time
+            response_size = int(response.headers.get('Content-Length', 0))
+            self.class_ts_estimator.update_progress_bar(response_size, duration, progress_bar)
+
+            # Decrypt the segment content if decryption is needed
+            if self.decryption is not None:
+                segment_content = self.decryption.decrypt(segment_content)
+
+            # Add the segment to the queue
+            self.queue.put((index, segment_content))
+            progress_bar.update(1)

         except (HTTPError, ConnectionError, Timeout, RequestException) as e:
+            progress_bar.update(1)
             logging.error(f"Request-related exception while downloading segment: {e}")
-        except Exception as e:
-            logging.error(f"An unexpected exception occurred while download segment: {e}")

-        # Update bar
-        progress_bar.update(1)
+        except Exception as e:
+            progress_bar.update(1)
+            logging.error(f"An unexpected exception occurred while download segment: {e}")

     def write_segments_to_file(self):
         """
         Writes downloaded segments to a file in the correct order.
         """
-        with open(self.tmp_file_path, 'ab') as f:
-            while True:
-                with self.condition:
-                    while self.segment_queue.empty() and self.current_index < len(self.segments):
-                        self.condition.wait()  # Wait until a new segment is available or all segments are downloaded
-
-                    if self.segment_queue.empty() and self.current_index >= len(self.segments):
-                        break  # Exit loop if all segments have been processed
-
-                    if not self.segment_queue.empty():
-                        # Get the segment from the queue
-                        index, segment_content = self.segment_queue.get()
-
-                        # Write the segment to the file
-                        if index == self.current_index:
-                            f.write(segment_content)
-                            self.current_index += 1
-                            self.segment_queue.task_done()
-                        else:
-                            self.segment_queue.put((index, segment_content))  # Requeue the segment if it is not the next to be written
-                            self.condition.notify()
+        with open(self.tmp_file_path, 'wb') as f:
+            expected_index = 0
+            buffer = {}
+
+            while not self.stop_event.is_set() or not self.queue.empty():
+                try:
+                    index, segment_content = self.queue.get(timeout=1)
+
+                    if index == expected_index:
+                        f.write(segment_content)
+                        f.flush()
+                        expected_index += 1
+
+                        # Write any buffered segments in order
+                        while expected_index in buffer:
+                            f.write(buffer.pop(expected_index))
+                            f.flush()
+                            expected_index += 1
+                    else:
+                        buffer[index] = segment_content
+
+                except queue.Empty:
+                    continue
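The rewritten writer drops the condition-variable handshake: download workers simply `put((index, content))` on the queue, and the writer thread writes the segment it expects next, parking out-of-order arrivals in a dict until their predecessors show up. A self-contained sketch of that reordering logic (illustrative only, with toy data):

```python
import queue

# Simulate segments finishing out of order.
q = queue.Queue()
for index, payload in [(2, b"c"), (0, b"a"), (1, b"b")]:
    q.put((index, payload))

expected_index, buffer, out = 0, {}, bytearray()
while not q.empty():
    index, payload = q.get()
    if index == expected_index:
        out += payload
        expected_index += 1
        # Drain any buffered successors that are now in order.
        while expected_index in buffer:
            out += buffer.pop(expected_index)
            expected_index += 1
    else:
        # Not its turn yet: hold it until its predecessors arrive.
        buffer[index] = payload

assert bytes(out) == b"abc"
```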
@@ -296,10 +264,11 @@ class M3U8_Segments:
         - add_desc (str): Additional description for the progress bar.
         """
         if TQDM_USE_LARGE_BAR:
-            bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}| {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}| {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
+            bar_format=f"{Colors.YELLOW}Downloading {Colors.WHITE}({add_desc}{Colors.WHITE}): {Colors.RED}{{percentage:.2f}}% {Colors.MAGENTA}{{bar}} {Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] {Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
         else:
             bar_format=f"{Colors.YELLOW}Proc{Colors.WHITE}: {Colors.RED}{{percentage:.2f}}% {Colors.WHITE}| {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"

+        # Create progress bar
         progress_bar = tqdm(
             total=len(self.segments),
             unit='s',
@@ -307,21 +276,18 @@ class M3U8_Segments:
             bar_format=bar_format
         )

+        # Start a separate thread to write segments to the file
+        writer_thread = threading.Thread(target=self.write_segments_to_file)
+        writer_thread.start()
+
+        # Start all workers
         with ThreadPoolExecutor(max_workers=TQDM_MAX_WORKER) as executor:
-
-            # Start a separate thread to write segments to the file
-            writer_thread = threading.Thread(target=self.write_segments_to_file)
-            writer_thread.start()
-
-            # Start all workers
             for index, segment_url in enumerate(self.segments):
-
-                # Submit the download task to the executor
+                time.sleep(TQDM_DELAY_WORKER)
                 executor.submit(self.make_requests_stream, segment_url, index, progress_bar)

-            # Wait for all segments to be downloaded
-            executor.shutdown()
-
-            with self.condition:
-                self.condition.notify_all()  # Wake up the writer thread if it's waiting
-            writer_thread.join()  # Wait for the writer thread to finish
+            # Wait for all tasks to complete
+            executor.shutdown(wait=True)
+            self.stop_event.set()
+            writer_thread.join()
+            progress_bar.close()
@@ -1,6 +1,9 @@
-# 02.04.24
+# 09.06.24

-from .decryption import M3U8_Decryption
-from .estimator import M3U8_Ts_Estimator
-from .parser import M3U8_Parser, M3U8_Codec
-from .url_fixer import M3U8_UrlFix
+from .helper import (
+    M3U8_Decryption,
+    M3U8_Ts_Estimator,
+    M3U8_Parser,
+    M3U8_Codec,
+    M3U8_UrlFix
+)
Src/Lib/M3U8/helper/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+# 02.04.24
+
+from .decryptor import M3U8_Decryption
+from .estimator import M3U8_Ts_Estimator
+from .parser import M3U8_Parser, M3U8_Codec
+from .url_fixer import M3U8_UrlFix
@@ -51,25 +51,29 @@ class M3U8_Ts_Estimator:
             return

         # Calculate speed outside of the lock
-        try:
-            speed_mbps = (size_download * 8) / (duration * 1_000_000)
-        except ZeroDivisionError as e:
-            logging.error("Division by zero error while calculating speed: %s", e)
-            return
+        speed_mbps = (size_download * 4) / (duration * (1024 * 1024))

-        # Only update shared data within the lock
-        with self.lock:
-            self.ts_file_sizes.append(size)
-            self.now_downloaded_size += size_download
-            self.list_speeds.append(speed_mbps)
+        # Add total size bytes
+        self.ts_file_sizes.append(size)
+        self.now_downloaded_size += size_download
+        self.list_speeds.append(speed_mbps)

         # Calculate moving average
         smoothed_speed = sum(self.list_speeds) / len(self.list_speeds)
         self.smoothed_speeds.append(smoothed_speed)

         # Update smooth speeds
         if len(self.smoothed_speeds) > self.average_over:
             self.smoothed_speeds.pop(0)

+    def get_average_speed(self) -> float:
+        """
+        Calculate the average speed from a list of speeds and convert it to megabytes per second (MB/s).
+
+        Returns:
+            float: The average speed in megabytes per second (MB/s).
+        """
+        return (sum(self.smoothed_speeds) / len(self.smoothed_speeds))
+
     def calculate_total_size(self) -> str:
         """
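Worth noting on units: the removed expression `(size_download * 8) / (duration * 1_000_000)` yields megabits per second, while the replacement `(size_download * 4) / (duration * (1024 * 1024))` matches neither plain MiB/s (factor 1) nor Mbps (factor 8); the factor 4 appears to be a tuned correction rather than a standard unit conversion. A quick comparison for 1 MiB transferred in one second:

```python
size_download = 1024 * 1024  # bytes in this sample
duration = 1.0               # seconds

old_speed = (size_download * 8) / (duration * 1_000_000)      # ~8.39 (Mbps)
new_speed = (size_download * 4) / (duration * (1024 * 1024))  # 4.0 (displayed as Mbps below)

print(f"{old_speed:.2f} vs {new_speed:.2f}")
```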
@@ -96,15 +100,6 @@ class M3U8_Ts_Estimator:
             logging.error("An unexpected error occurred: %s", e)
             return "Error"

-    def get_average_speed(self) -> float:
-        """
-        Calculate the average speed from a list of speeds and convert it to megabytes per second (MB/s).
-
-        Returns:
-            float: The average speed in megabytes per second (MB/s).
-        """
-        return ((sum(self.smoothed_speeds) / len(self.smoothed_speeds)) / 8 ) * 10 # MB/s
-
     def get_downloaded_size(self) -> str:
         """
         Get the total downloaded size formatted as a human-readable string.
@@ -148,5 +143,5 @@ class M3U8_Ts_Estimator:
         else:
             progress_counter.set_postfix_str(
                 f"{Colors.WHITE}[ {Colors.GREEN}{number_file_downloaded}{Colors.RED} {units_file_downloaded} "
-                f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}MB/s"
+                f"{Colors.WHITE}| {Colors.CYAN}{average_internet_speed:.2f} {Colors.RED}Mbps"
             )
@@ -4,7 +4,7 @@ import logging


 # Internal utilities
-from .lib_parser import load
+from ..parser import load


 # External libraries
@@ -5,7 +5,7 @@ from collections import namedtuple


 # Internal utilities
-from ..lib_parser import parser
+from ..parser import parser


 # Variable
@@ -13,7 +13,6 @@ StreamInfo = namedtuple('StreamInfo', ['bandwidth', 'program_id', 'resolution',
 Media = namedtuple('Media', ['uri', 'type', 'group_id', 'language', 'name','default', 'autoselect', 'forced', 'characteristics'])

-

 class M3U8:
     """
     Represents a single M3U8 playlist. Should be instantiated with the content as string.
@@ -6,7 +6,7 @@ import datetime


 # Internal utilities
-from ..lib_parser import protocol
+from ..parser import protocol
 from ._util import (
     remove_quotes,
     remove_quotes_parser,
(deleted file, 86 lines)
@@ -1,86 +0,0 @@
-# 13.05.24
-
-import socket
-import logging
-import urllib3
-from urllib.parse import urlparse, urlunparse
-
-import warnings
-warnings.filterwarnings("ignore", category=urllib3.exceptions.InsecureRequestWarning)
-
-
-# Variable
-url_test = "https://sc-b1-18.scws-content.net/hls/100/b/d3/bd3a430d-0a13-4bec-8fcc-ea41af183555/audio/ita/0010-0100.ts?token=CiEPTIyvEoTkGk3szgDu9g&expires=1722801022"
-
-
-def get_ip_from_url(url):
-    """
-    Extracts the IP address from a given URL.
-
-    Args:
-        url (str): The URL from which to extract the IP address.
-
-    Returns:
-        str or None: The extracted IP address if successful, otherwise None.
-    """
-    try:
-        parsed_url = urlparse(url)
-        if not parsed_url.hostname:
-            logging.error(f"Invalid URL: {url}")
-            return None
-
-        ip_address = socket.gethostbyname(parsed_url.hostname)
-        return ip_address
-
-    except Exception as e:
-        logging.error(f"Error: {e}")
-        return None
-
-
-def replace_random_number(url, random_number):
-    """
-    Replaces a random number in the URL.
-
-    Args:
-        url (str): The URL in which to replace the random number.
-        random_number (int): The random number to replace in the URL.
-
-    Returns:
-        str: The modified URL with the random number replaced.
-    """
-    parsed_url = urlparse(url)
-    parts = parsed_url.netloc.split('.')
-    prefix = None
-
-    for i, part in enumerate(parts):
-        if '-' in part and part.startswith("sc-"):
-            prefix = part.split('-')[0] + '-' + part.split('-')[1] + '-'
-            new_part = prefix + f"{random_number:02d}"
-            parts[i] = new_part
-            break
-
-    new_netloc = '.'.join(parts)
-    return urlunparse((parsed_url.scheme, new_netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment))
-
-
-def main():
-    """
-    Main function to test the URL manipulation.
-    """
-    valid_ip = []
-
-    for i in range(1, 36):
-        try:
-            ip = get_ip_from_url(replace_random_number(url_test, i))
-
-            if ip:
-                valid_ip.append(ip)
-
-        except Exception as e:
-            logging.error(f"Error: {e}")
-            pass
-
-    print(f"Valid IP addresses: {sorted(valid_ip, reverse=True)}")
-
-
-if __name__ == '__main__':
-    main()
@@ -13,13 +13,11 @@ def read_file(file_path):
     return m3u8_content


-
 # Import
-from Src.Lib.M3U8.lib_parser import M3U8
+from Src.Lib.M3U8.parser import M3U8
 from Src.Lib.M3U8 import M3U8_Parser

-
 # Test data
 obj_m3u8_parser = M3U8_Parser()
 base_path_file = os.path.join('Test', 'data', 'm3u8')
@@ -17,7 +17,8 @@
         "proxy": []
     },
     "M3U8_DOWNLOAD": {
-        "tdqm_workers": 4,
+        "tdqm_workers": 3,
+        "tqdm_delay": 0.01,
         "tqdm_use_large_bar": true,
         "download_video": true,
         "download_audio": true,
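The new `tqdm_delay` key feeds the `TQDM_DELAY_WORKER` lookup added to the downloader, which sleeps between task submissions. A minimal sketch of reading the changed keys (names taken from this diff):

```python
from Src.Util._jsonConfig import config_manager

workers = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers')  # now 3
delay = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')    # new: 0.01s between submissions

print(workers, delay)
```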
@@ -45,6 +46,6 @@
     "SITE": {
         "streamingcommunity": "foo",
         "animeunity": "to",
-        "altadefinizione": "food"
+        "altadefinizione": "vodka"
     }
 }
@@ -1,7 +1,6 @@
 requests
 bs4
-certifi
-tqdm
 rich
+tqdm
 unidecode
 fake-useragent