Add verify for all requests (#345)

* Update downloader.py

* Update segments.py

* Update config_json.py

* Update downloader.py

* Update sweetpixel.py

* Update ddl.py

* Update hdplayer.py

* Update hdplayer.py

* Update maxstream.py

* Update maxstream.py

* Update mediapolisvod.py

* Update mixdrop.py

* Update mixdrop.py

* Update supervideo.py

* Update vixcloud.py

* Update vixcloud.py

* Update update.py
ciccioxm3 2025-06-26 14:06:09 +02:00 committed by github-actions[bot]
parent fd6f2151bc
commit 1e3f964167
13 changed files with 66 additions and 58 deletions
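Every hunk below applies the same pattern: a REQUEST_VERIFY flag is read once from the REQUESTS section of the configuration and forwarded to every httpx/requests call or client, so TLS certificate verification can be toggled globally. A minimal sketch of that pattern, reusing the config_manager and get_userAgent helpers imported in the diffs (the fetch_page wrapper itself is illustrative):

import httpx

from StreamingCommunity.Util.config_json import config_manager
from StreamingCommunity.Util.headers import get_userAgent

# Read the shared request options once at import time, as the patched modules do.
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool("REQUESTS", "verify")

def fetch_page(url: str) -> httpx.Response:
    # Forward the verify flag on every request instead of relying on the default.
    response = httpx.get(
        url,
        headers={"user-agent": get_userAgent()},
        timeout=MAX_TIMEOUT,
        verify=REQUEST_VERIFY,
    )
    response.raise_for_status()
    return response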

View File

@@ -6,10 +6,10 @@
"time_change": "2025-03-19 12:20:19"
},
"cb01new": {
"domain": "world",
"full_url": "https://cb01net.world/",
"old_domain": "rest",
"time_change": "2025-06-25 15:23:57"
"domain": "today",
"full_url": "https://cb01net.today/",
"old_domain": "world",
"time_change": "2025-07-01 07:22:25"
},
"animeunity": {
"domain": "so",
@@ -42,21 +42,21 @@
"time_change": "2025-04-29 12:30:30"
},
"altadefinizione": {
"domain": "blog",
"full_url": "https://altadefinizionegratis.blog/",
"old_domain": "spa",
"time_change": "2025-06-24 18:30:00"
"domain": "life",
"full_url": "https://altadefinizionegratis.life/",
"old_domain": "blog",
"time_change": "2025-06-30 21:19:33"
},
"streamingcommunity": {
"domain": "shop",
"full_url": "https://streamingunity.shop/",
"old_domain": "cam",
"time_change": "2025-06-19 19:15:09"
"domain": "life",
"full_url": "https://streamingunity.life/",
"old_domain": "shop",
"time_change": "2025-06-29 16:24:39"
},
"altadefinizionegratis": {
"domain": "blog",
"full_url": "https://altadefinizionegratis.blog/",
"old_domain": "vip",
"time_change": "2025-06-24 18:30:03"
"domain": "life",
"full_url": "https://altadefinizionegratis.life/",
"old_domain": "blog",
"time_change": "2025-06-30 21:19:36"
}
}

View File

@@ -15,6 +15,7 @@ from StreamingCommunity.Util.headers import get_userAgent
# Variable
max_timeout = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
@@ -41,7 +42,8 @@ class VideoSource:
url=url,
headers=self.headers,
cookies=self.cookie,
timeout=max_timeout
timeout=max_timeout,
verify=REQUEST_VERIFY
)
response.raise_for_status()
@@ -77,4 +79,4 @@ class VideoSource:
logging.error("Failed to retrieve content from the URL.")
except Exception as e:
logging.error(f"An error occurred while parsing the playlist: {e}")
logging.error(f"An error occurred while parsing the playlist: {e}")

View File

@@ -14,11 +14,12 @@ from StreamingCommunity.Util.config_json import config_manager
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
def __init__(self, proxy=None):
self.client = httpx.Client(headers={'user-agent': get_userAgent()}, timeout=MAX_TIMEOUT, proxy=proxy)
self.client = httpx.Client(headers={'user-agent': get_userAgent()}, timeout=MAX_TIMEOUT, proxy=proxy, verify=REQUEST_VERIFY)
def extractLinkHdPlayer(self, response):
"""Extract iframe source from the page."""

View File

@@ -18,7 +18,7 @@ from StreamingCommunity.Util.headers import get_userAgent
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
def __init__(self, url: str):
@@ -39,7 +39,7 @@ class VideoSource:
Sends a request to the initial URL and extracts the redirect URL.
"""
try:
response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
# Extract the redirect URL from the HTML
@@ -58,7 +58,7 @@ class VideoSource:
Sends a request to the redirect URL and extracts the Maxstream URL.
"""
try:
response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
# Extract the Maxstream URL from the HTML
@@ -77,12 +77,12 @@ class VideoSource:
# Make request to stayonline api
data = {'id': self.redirect_url.split("/")[-2], 'ref': ''}
response = httpx.post('https://stayonline.pro/ajax/linkEmbedView.php', headers=headers, data=data)
response = httpx.post('https://stayonline.pro/ajax/linkEmbedView.php', headers=headers, data=data, verify=REQUEST_VERIFY)
response.raise_for_status()
uprot_url = response.json()['data']['value']
# Retry getting maxstream url
response = httpx.get(uprot_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(uprot_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")
maxstream_url = soup.find("a").get("href")
@@ -104,7 +104,7 @@ class VideoSource:
Sends a request to the Maxstream URL and extracts the .m3u8 file URL.
"""
try:
response = httpx.get(self.maxstream_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(self.maxstream_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")
@@ -138,4 +138,4 @@ class VideoSource:
"""
self.get_redirect_url()
self.get_maxstream_url()
return self.get_m3u8_url()
return self.get_m3u8_url()

View File

@@ -12,7 +12,7 @@ from StreamingCommunity.Util.headers import get_headers
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
@@ -29,7 +29,7 @@ class VideoSource:
return "Error: Unable to determine video JSON URL"
try:
response = httpx.get(video_url, headers=get_headers(), timeout=MAX_TIMEOUT)
response = httpx.get(video_url, headers=get_headers(), timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
if response.status_code != 200:
return f"Error: Failed to fetch video data (Status: {response.status_code})"
@@ -50,7 +50,7 @@ class VideoSource:
'cont': element_key,
'output': '62',
}
stream_response = httpx.get('https://mediapolisvod.rai.it/relinker/relinkerServlet.htm', params=params, headers=get_headers(), timeout=MAX_TIMEOUT)
stream_response = httpx.get('https://mediapolisvod.rai.it/relinker/relinkerServlet.htm', params=params, headers=get_headers(), timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
if stream_response.status_code != 200:
return f"Error: Failed to fetch stream URL (Status: {stream_response.status_code})"
@@ -61,4 +61,4 @@ class VideoSource:
return m3u8_url
except Exception as e:
return f"Error: {str(e)}"
return f"Error: {str(e)}"

View File

@@ -17,7 +17,7 @@ from StreamingCommunity.Util.headers import get_userAgent
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
STAYONLINE_BASE_URL = "https://stayonline.pro"
@@ -45,7 +45,7 @@ class VideoSource:
def get_redirect_url(self) -> str:
"""Extract the stayonline redirect URL from the initial page."""
try:
response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")
@@ -68,7 +68,7 @@ class VideoSource:
raise ValueError("Redirect URL not set. Call get_redirect_url first.")
try:
response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT)
response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")
@@ -89,7 +89,7 @@ class VideoSource:
self.headers['referer'] = f'{self.STAYONLINE_BASE_URL}/l/{link_id}/'
data = {'id': link_id, 'ref': ''}
response = httpx.post(f'{self.STAYONLINE_BASE_URL}/ajax/linkView.php', headers=self.headers, data=data, timeout=MAX_TIMEOUT)
response = httpx.post(f'{self.STAYONLINE_BASE_URL}/ajax/linkView.php', headers=self.headers, data=data, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
return response.json()['data']['value']
@@ -128,7 +128,8 @@ class VideoSource:
response = httpx.get(
f'{self.MIXDROP_BASE_URL}/e/{video_id}',
headers=self._get_mixdrop_headers(),
timeout=MAX_TIMEOUT
timeout=MAX_TIMEOUT,
verify=REQUEST_VERIFY
)
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")

View File

@@ -17,6 +17,7 @@ from StreamingCommunity.Util.headers import get_headers
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
@@ -41,7 +42,7 @@ class VideoSource:
- str: The response content if successful, None otherwise.
"""
try:
response = requests.get(url, headers=self.headers, timeout=MAX_TIMEOUT, impersonate="chrome110")
response = requests.get(url, headers=self.headers, timeout=MAX_TIMEOUT, impersonate="chrome110", verify=REQUEST_VERIFY)
if response.status_code >= 400:
logging.error(f"Request failed with status code: {response.status_code}")
return None
@@ -161,4 +162,4 @@ class VideoSource:
except Exception as e:
logging.error(f"An error occurred: {e}")
return None
return None

View File

@@ -14,7 +14,7 @@ from StreamingCommunity.Util.headers import get_userAgent
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
class VideoSource:
def __init__(self, full_url, episode_data, session_id, csrf_token):
@@ -30,7 +30,8 @@ class VideoSource:
cookies={"sessionId": session_id},
headers={"User-Agent": get_userAgent(), "csrf-token": csrf_token},
base_url=full_url,
timeout=MAX_TIMEOUT
timeout=MAX_TIMEOUT,
verify=REQUEST_VERIFY
)
def get_playlist(self):
@@ -46,4 +47,4 @@ class VideoSource:
except Exception as e:
logging.error(f"Error in new API system: {e}")
return None
return None

View File

@@ -20,6 +20,7 @@ from .Helper.Vixcloud.js_parser import JavaScriptParser
# Variable
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify')
console = Console()
@@ -57,7 +58,7 @@ class VideoSource:
}
try:
response = httpx.get(f"{self.url}/iframe/{self.media_id}", headers=self.headers, params=params, timeout=MAX_TIMEOUT, proxy=self.proxy)
response = httpx.get(f"{self.url}/iframe/{self.media_id}", headers=self.headers, params=params, timeout=MAX_TIMEOUT, proxy=self.proxy, verify=REQUEST_VERIFY)
response.raise_for_status()
# Parse response with BeautifulSoup to get iframe source
@@ -100,7 +101,7 @@ class VideoSource:
"""
try:
if self.iframe_src is not None:
response = httpx.get(self.iframe_src, headers=self.headers, timeout=MAX_TIMEOUT)
response = httpx.get(self.iframe_src, headers=self.headers, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
# Parse response with BeautifulSoup to get content
@@ -178,7 +179,7 @@ class VideoSourceAnime(VideoSource):
str: Parsed script content
"""
try:
response = httpx.get(f"{self.url}/embed-url/{episode_id}", headers=self.headers, timeout=MAX_TIMEOUT)
response = httpx.get(f"{self.url}/embed-url/{episode_id}", headers=self.headers, timeout=MAX_TIMEOUT, verify=REQUEST_VERIFY)
response.raise_for_status()
# Extract and clean embed URL
@@ -186,7 +187,7 @@ class VideoSourceAnime(VideoSource):
self.iframe_src = embed_url
# Fetch video content using embed URL
video_response = httpx.get(embed_url)
video_response = httpx.get(embed_url, verify=REQUEST_VERIFY)
video_response.raise_for_status()
# Parse response with BeautifulSoup to get content of the script
@@ -198,4 +199,4 @@ class VideoSourceAnime(VideoSource):
except Exception as e:
logging.error(f"Error fetching embed URL: {e}")
return None
return None

View File

@@ -110,7 +110,7 @@ class M3U8_Segments:
self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
try:
client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT, 'verify': REQUEST_VERIFY}
response = httpx.get(url=key_uri, **client_params)
response.raise_for_status()
@@ -158,7 +158,7 @@ class M3U8_Segments:
"""
if self.is_index_url:
try:
client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT}
client_params = {'headers': {'User-Agent': get_userAgent()}, 'timeout': MAX_TIMEOOUT, 'verify': REQUEST_VERIFY}
response = httpx.get(self.url, **client_params, follow_redirects=True)
response.raise_for_status()
@@ -202,7 +202,8 @@ class M3U8_Segments:
'headers': {'User-Agent': get_userAgent()},
'timeout': SEGMENT_MAX_TIMEOUT,
'follow_redirects': True,
'http2': False
'http2': False,
'verify': REQUEST_VERIFY
}
return httpx.Client(**client_params)
@@ -463,4 +464,4 @@ class M3U8_Segments:
f"[white]Failed segments: [red]{self.info_nFailed}")
if self.info_nRetry > len(self.segments) * 0.3:
console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")
console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")

View File

@@ -88,7 +88,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
return None, False
if GET_ONLY_LINK:
console.print(f"URL: {url}[/bold red]")
console.print(f"[bold red]URL: {url}[/bold red]")
return path, True
if not (url.lower().startswith('http://') or url.lower().startswith('https://')):
@@ -115,7 +115,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
os.makedirs(os.path.dirname(path), exist_ok=True)
try:
with httpx.Client() as client:
with httpx.Client(verify=REQUEST_VERIFY) as client:
with client.stream("GET", url, headers=headers) as response:
response.raise_for_status()
total = int(response.headers.get('content-length', 0))
@@ -188,4 +188,4 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: dict = No
return None, interrupt_handler.kill_download
finally:
signal.signal(signal.SIGINT, original_handler)
signal.signal(signal.SIGINT, original_handler)

View File

@@ -31,7 +31,7 @@ async def fetch_github_data(client, url):
url=url,
headers={'user-agent': get_userAgent()},
timeout=config_manager.get_int("REQUESTS", "timeout"),
allow_redirects=True
follow_redirects=True
)
return response.json()
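This update.py hunk is a keyword fix rather than a verify addition: httpx names the redirect option follow_redirects, while allow_redirects is the requests-library spelling. A minimal sketch of the corrected coroutine, assuming the caller passes in an httpx.AsyncClient as in the hunk above:

import httpx

from StreamingCommunity.Util.config_json import config_manager
from StreamingCommunity.Util.headers import get_userAgent

async def fetch_github_data(client: httpx.AsyncClient, url: str) -> dict:
    # follow_redirects is the httpx keyword; allow_redirects is the requests one.
    response = await client.get(
        url=url,
        headers={'user-agent': get_userAgent()},
        timeout=config_manager.get_int("REQUESTS", "timeout"),
        follow_redirects=True
    )
    return response.json()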

View File

@@ -140,7 +140,7 @@ class ConfigManager:
console.print(f"[bold cyan]Downloading reference configuration:[/bold cyan] [green]{self.reference_config_url}[/green]")
try:
response = requests.get(self.reference_config_url, timeout=8, headers={'User-Agent': get_userAgent()})
response = requests.get(self.reference_config_url, timeout=8, headers={'User-Agent': get_userAgent()}, verify=self.get_bool('REQUESTS', 'verify'))
if response.status_code == 200:
with open(self.file_path, 'wb') as f:
@@ -275,8 +275,8 @@ class ConfigManager:
}
try:
console.print(f"[bold cyan]Retrieving site data from GitHub:[/bold cyan]")
response = requests.get(domains_github_url, timeout=8, headers=headers)
console.print("[bold cyan]Retrieving site data from GitHub:[/bold cyan]")
response = requests.get(domains_github_url, timeout=8, headers=headers, verify=self.get_bool('REQUESTS', 'verify'))
if response.ok:
self.configSite = response.json()
@@ -344,7 +344,7 @@ class ConfigManager:
try:
logging.info(f"Downloading {filename} from {url}...")
console.print(f"[bold cyan]File download:[/bold cyan] {os.path.basename(filename)}")
response = requests.get(url, timeout=8, headers={'User-Agent': get_userAgent()})
response = requests.get(url, timeout=8, headers={'User-Agent': get_userAgent()}, verify=self.get_bool('REQUESTS', 'verify'))
if response.status_code == 200:
with open(filename, 'wb') as f:
@@ -570,4 +570,4 @@ def get_use_large_bar():
# Initialize the ConfigManager when the module is imported
config_manager = ConfigManager()
config_manager = ConfigManager()