Fix use of proxy.

This commit is contained in:
Lovi 2024-06-14 11:41:07 +02:00
parent 5337a7561a
commit d7751c20a8
3 changed files with 64 additions and 52 deletions

View File

@ -1,6 +1,7 @@
# 09.06.24
import os
import sys
import time
import logging
from concurrent.futures import ThreadPoolExecutor
@ -12,6 +13,7 @@ import httpx
# Internal utilities
from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.os import check_file_existence
@ -26,7 +28,6 @@ class ProxyManager:
"""
self.proxy_list = proxy_list or []
self.verified_proxies = []
self.failed_proxies = {}
self.timeout = config_manager.get_float('REQUESTS', 'timeout')
self.url = url
@ -42,17 +43,18 @@ class ProxyManager:
"""
protocol = proxy.split(":")[0].lower()
protocol = f'{protocol}://'
proxy = {protocol: proxy, "https://": proxy}
try:
response = httpx.get(self.url, proxies={protocol: proxy}, timeout=self.timeout)
with httpx.Client(proxies=proxy, verify=False) as client:
response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
if response.status_code == 200:
logging.info(f"Proxy {proxy} is working.")
return proxy
if response.status_code == 200:
logging.info(f"Proxy {proxy} is working.")
return proxy
except Exception as e:
logging.error(f"Proxy {proxy} failed: {e}")
self.failed_proxies[proxy] = time.time()
logging.error(f"Test proxy {proxy} failed: {e}")
return None
def verify_proxies(self):
@ -70,15 +72,14 @@ class ProxyManager:
"""
        Get validated proxies.
"""
validate_proxy = []
for proxy in self.verified_proxies:
protocol = proxy.split(":")[0].lower()
protocol = f'{protocol}://' # For httpx
validate_proxy.append({protocol: proxy})
return validate_proxy
if len(self.verified_proxies) > 0:
return self.verified_proxies
else:
logging.error("Cant find valid proxy.")
sys.exit(0)
def main_test_proxy(url_test):
@ -86,9 +87,6 @@ def main_test_proxy(url_test):
if check_file_existence(path_file_proxt_list):
    # Write a placeholder proxy entry so config.json signals THERE_IS_PROXY_LIST for segment downloads
config_manager.set_key("REQUESTS", "proxy", ["192.168.1.1"])
# Read file
with open(path_file_proxt_list, 'r') as file:
ip_addresses = file.readlines()

View File

@ -95,6 +95,7 @@ class M3U8_Segments:
self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
logging.info(f"Uri key: {key_uri}")
        # Make request to fetch the decryption key (key_uri)
try:
response = httpx.get(key_uri, headers=headers_index)
response.raise_for_status()
@ -190,41 +191,48 @@ class M3U8_Segments:
- progress_bar (tqdm): Progress counter for tracking download progress.
"""
try:
# Generate headers
start_time = time.time()
# Generate headers
start_time = time.time()
# Make request to get content
if THERE_IS_PROXY_LIST:
proxy = self.valid_proxy[index % len(self.valid_proxy)]
logging.info(f"Use proxy: {proxy}")
# Make request to get content
if THERE_IS_PROXY_LIST:
proxy = self.valid_proxy[index % len(self.valid_proxy)]
logging.info(f"Use proxy: {proxy}")
with httpx.Client(proxies=proxy, verify=False) as client:
#print(client.get("https://api.ipify.org/?format=json").json())
if 'key_base_url' in self.__dict__:
response = client.get(ts_url, headers=random_headers(self.key_base_url), timeout=REQUEST_TIMEOUT)
else:
response = client.get(ts_url, headers={'user-agent': get_headers()}, timeout=REQUEST_TIMEOUT)
else:
if 'key_base_url' in self.__dict__:
response = httpx.get(ts_url, headers=random_headers(self.key_base_url), verify=False, timeout=REQUEST_TIMEOUT)
else:
response = httpx.get(ts_url, headers={'user-agent': get_headers()}, verify=False, timeout=REQUEST_TIMEOUT)
# Get response content
response.raise_for_status()
segment_content = response.content
# Update bar
duration = time.time() - start_time
response_size = int(response.headers.get('Content-Length', 0))
self.class_ts_estimator.update_progress_bar(response_size, duration, progress_bar)
if 'key_base_url' in self.__dict__:
response = httpx.get(ts_url, headers=random_headers(self.key_base_url), proxy=proxy, verify=False, timeout=REQUEST_TIMEOUT)
else:
response = httpx.get(ts_url, headers={'user-agent': get_headers()}, proxy=proxy, verify=False, timeout=REQUEST_TIMEOUT)
else:
if 'key_base_url' in self.__dict__:
response = httpx.get(ts_url, headers=random_headers(self.key_base_url), verify=False, timeout=REQUEST_TIMEOUT)
else:
response = httpx.get(ts_url, headers={'user-agent': get_headers()}, verify=False, timeout=REQUEST_TIMEOUT)
# Decrypt the segment content if decryption is needed
if self.decryption is not None:
segment_content = self.decryption.decrypt(segment_content)
# Get response content
response.raise_for_status()
segment_content = response.content
# Add the segment to the queue
self.queue.put((index, segment_content))
progress_bar.update(1)
# Update bar
duration = time.time() - start_time
response_size = int(response.headers.get('Content-Length', 0))
self.class_ts_estimator.update_progress_bar(response_size, duration, progress_bar)
# Decrypt the segment content if decryption is needed
if self.decryption is not None:
segment_content = self.decryption.decrypt(segment_content)
# Add the segment to the queue
self.queue.put((index, segment_content))
progress_bar.update(1)
except Exception as e:
console.print(f"Failed to download '{ts_url}', status error: {e}.")
def write_segments_to_file(self):
"""

View File

@ -15,8 +15,9 @@
"index": {
"user-agent": ""
},
"proxy_start_min": 0.6,
"proxy_start_max": 1.0
"proxy_start_min": 0.1,
"proxy_start_max": 0.4,
"proxy": []
},
"M3U8_DOWNLOAD": {
"tdqm_workers": 2,
@ -25,10 +26,15 @@
"download_video": true,
"download_audio": true,
"merge_audio": true,
"specific_list_audio": ["ita"],
"specific_list_audio": [
"ita"
],
"download_sub": true,
"merge_subs": true,
"specific_list_subtitles": ["eng", "spa"],
"specific_list_subtitles": [
"eng",
"spa"
],
"cleanup_tmp_folder": true,
"create_report": false
},