From 44e81d7f5ed006d0a55e725201ad8cb5a9883923 Mon Sep 17 00:00:00 2001
From: Lovi <62809003+Lovi-0@users.noreply.github.com>
Date: Sun, 27 Oct 2024 17:20:55 +0100
Subject: [PATCH] Add timeout to all sites, add type category, add recall home
 for HLS.
---
README.md | 14 +-----
Src/Api/1337xx/__init__.py | 1 +
Src/Api/1337xx/site.py | 16 ++++++-
Src/Api/Template/Util/get_domain.py | 3 +-
Src/Api/altadefinizione/Player/supervideo.py | 7 ++-
Src/Api/altadefinizione/__init__.py | 1 +
Src/Api/altadefinizione/film.py | 18 +++++---
Src/Api/altadefinizione/site.py | 15 ++++++-
Src/Api/animeunity/Core/Player/vixcloud.py | 26 ++++++++---
Src/Api/animeunity/__init__.py | 1 +
Src/Api/animeunity/site.py | 18 +++++++-
Src/Api/bitsearch/__init__.py | 1 +
Src/Api/bitsearch/site.py | 15 ++++++-
Src/Api/cb01/Player/maxstream.py | 27 +++++++++--
Src/Api/cb01/__init__.py | 1 +
Src/Api/cb01/site.py | 16 ++++++-
Src/Api/ddlstreamitaly/Player/ddl.py | 9 +++-
Src/Api/ddlstreamitaly/__init__.py | 1 +
Src/Api/ddlstreamitaly/site.py | 15 ++++++-
Src/Api/guardaserie/Player/supervideo.py | 12 ++++-
Src/Api/guardaserie/__init__.py | 1 +
Src/Api/guardaserie/site.py | 15 ++++++-
Src/Api/mostraguarda/__init__.py | 1 +
Src/Api/mostraguarda/film.py | 26 +++++------
.../Core/Player/vixcloud.py | 38 +++++++++++-----
Src/Api/streamingcommunity/__init__.py | 1 +
Src/Api/streamingcommunity/site.py | 17 +++++--
Src/Lib/Downloader/HLS/downloader.py | 17 +++++--
Src/Lib/Downloader/HLS/segments.py | 38 +++++++++++++---
Src/Lib/Downloader/MP4/downloader.py | 2 +-
config.json | 2 +-
run.py | 45 ++++++++++++-------
32 files changed, 317 insertions(+), 103 deletions(-)
diff --git a/README.md b/README.md
index 2cc81f5..13c083e 100644
--- a/README.md
+++ b/README.md
@@ -153,7 +153,7 @@ You can change some behaviors by tweaking the configuration file.
REQUESTS
* **timeout**: The timeout value for requests.
- - **Default Value**: `10`
+ - **Default Value**: `15`
* **verify_ssl**: Whether to verify SSL certificates.
- **Default Value**: `false`
@@ -165,22 +165,10 @@ You can change some behaviors by tweaking the configuration file.
M3U8_DOWNLOAD
- * **tdqm_workers**: The number of workers that will cooperate to download .ts files. **A high value may slow down your PC**
- - **Default Value**: `30`
-
* **tqdm_use_large_bar**: Whether to use large progress bars during downloads (Downloading %desc: %percentage:.2f %bar %elapsed < %remaining %postfix
- **Default Value**: `true`
- **Example Value**: `false` with Proc: %percentage:.2f %remaining %postfix
- * **download_video**: Whether to download video streams.
- - **Default Value**: `true`
-
- * **download_audio**: Whether to download audio streams.
- - **Default Value**: `true`
-
- * **download_sub**: Whether to download subtitle streams.
- - **Default Value**: `true`
-
* **specific_list_audio**: A list of specific audio languages to download.
- **Example Value**: `['ita']`
diff --git a/Src/Api/1337xx/__init__.py b/Src/Api/1337xx/__init__.py
index ca718c4..2dfb7b8 100644
--- a/Src/Api/1337xx/__init__.py
+++ b/Src/Api/1337xx/__init__.py
@@ -11,6 +11,7 @@ from .title import download_title
# Variable
indice = 8
+_use_for = "film_serie"
_deprecate = False
diff --git a/Src/Api/1337xx/site.py b/Src/Api/1337xx/site.py
index 6c04b9e..02139b0 100644
--- a/Src/Api/1337xx/site.py
+++ b/Src/Api/1337xx/site.py
@@ -7,6 +7,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -33,11 +35,21 @@ def title_search(word_to_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Construct the full site URL and load the search page
- response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/search/{unidecode(word_to_search)}/1/", headers={'user-agent': get_headers()}, follow_redirects=True)
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain_to_use}/search/{unidecode(word_to_search)}/1/",
+ headers={'user-agent': get_headers()},
+ follow_redirects=True,
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/Template/Util/get_domain.py b/Src/Api/Template/Util/get_domain.py
index d79fbcf..9503c89 100644
--- a/Src/Api/Template/Util/get_domain.py
+++ b/Src/Api/Template/Util/get_domain.py
@@ -70,13 +70,14 @@ def search_domain(site_name: str, base_url: str):
"""
# Extract config domain
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain = str(config_manager.get_dict("SITE", site_name)['domain'])
console.print(f"[cyan]Test site[white]: [red]{base_url}.{domain}")
try:
# Test the current domain
- response_follow = httpx.get(f"{base_url}.{domain}", headers={'user-agent': get_headers()}, timeout=4, follow_redirects=True)
+ response_follow = httpx.get(f"{base_url}.{domain}", headers={'user-agent': get_headers()}, timeout=max_timeout, follow_redirects=True)
console.print(f"[cyan]Response site[white]: [red]{response_follow.status_code}")
response_follow.raise_for_status()
diff --git a/Src/Api/altadefinizione/Player/supervideo.py b/Src/Api/altadefinizione/Player/supervideo.py
index 4d2dcb2..ce53e26 100644
--- a/Src/Api/altadefinizione/Player/supervideo.py
+++ b/Src/Api/altadefinizione/Player/supervideo.py
@@ -11,9 +11,14 @@ from bs4 import BeautifulSoup
# Internal utilities
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
+# Variable
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
+
+
class VideoSource:
def __init__(self, url: str):
"""
@@ -46,7 +51,7 @@ class VideoSource:
"""
try:
- response = httpx.get(url, headers=self.headers, follow_redirects=True)
+ response = httpx.get(url, headers=self.headers, follow_redirects=True, timeout=max_timeout)
response.raise_for_status()
return response.text
diff --git a/Src/Api/altadefinizione/__init__.py b/Src/Api/altadefinizione/__init__.py
index 66235b4..2089be1 100644
--- a/Src/Api/altadefinizione/__init__.py
+++ b/Src/Api/altadefinizione/__init__.py
@@ -11,6 +11,7 @@ from .film import download_film
# Variable
indice = 2
+_use_for = "film"
_deprecate = False
diff --git a/Src/Api/altadefinizione/film.py b/Src/Api/altadefinizione/film.py
index b32d2f8..579e1d4 100644
--- a/Src/Api/altadefinizione/film.py
+++ b/Src/Api/altadefinizione/film.py
@@ -1,14 +1,15 @@
# 26.05.24
import os
-import sys
-import logging
+import time
# Internal utilities
+from Src.Util.console import console, msg
from Src.Util.message import start_message
-from Src.Util.console import console
+from Src.Util.call_stack import get_call_stack
from Src.Lib.Downloader import HLS_Downloader
+from ..Template import execute_search
# Logic class
@@ -44,7 +45,10 @@ def download_film(select_title: MediaItem):
master_playlist = video_source.get_playlist()
# Download the film using the m3u8 playlist, and output filename
- HLS_Downloader(
- m3u8_playlist = master_playlist,
- output_filename = os.path.join(mp4_path, mp4_name)
- ).start()
+ if HLS_Downloader(m3u8_playlist = master_playlist, output_filename = os.path.join(mp4_path, mp4_name)).start() == 404:
+ time.sleep(2)
+
+ # Re call search function
+ if msg.ask("[green]Do you want to continue [white]([red]y[white])[green] or return at home[white]([red]n[white]) ", choices=['y', 'n'], default='y', show_choices=True) == "n":
+ frames = get_call_stack()
+ execute_search(frames[-4])
diff --git a/Src/Api/altadefinizione/site.py b/Src/Api/altadefinizione/site.py
index 5644795..f70d364 100644
--- a/Src/Api/altadefinizione/site.py
+++ b/Src/Api/altadefinizione/site.py
@@ -7,6 +7,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -31,11 +33,20 @@ def title_search(title_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Send request to search for title
- response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3", headers={'user-agent': get_headers()})
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain_to_use}/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3",
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/animeunity/Core/Player/vixcloud.py b/Src/Api/animeunity/Core/Player/vixcloud.py
index d4ecde3..8736594 100644
--- a/Src/Api/animeunity/Core/Player/vixcloud.py
+++ b/Src/Api/animeunity/Core/Player/vixcloud.py
@@ -21,6 +21,7 @@ from ..Class.WindowType import WindowVideo, WindowParameter, DynamicJSONConverte
# Variable
from ...costant import SITE_NAME
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
class VideoSource:
@@ -57,7 +58,11 @@ class VideoSource:
"""
try:
- response = httpx.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/", headers=self.headers)
+ response = httpx.get(
+ url=f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/",
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Parse JSON response and return episode count
@@ -84,7 +89,12 @@ class VideoSource:
"end_range": index_ep + 1
}
- response = httpx.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/{index_ep}", headers=self.headers, params=params, timeout=5)
+ response = httpx.get(
+ url=f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/{index_ep}",
+ headers=self.headers,
+ params=params,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Return information about the episode
@@ -107,7 +117,11 @@ class VideoSource:
"""
try:
- response = httpx.get(f"https://www.{self.base_name}.{self.domain}/embed-url/{episode_id}", headers=self.headers)
+ response = httpx.get(
+ url=f"https://www.{self.base_name}.{self.domain}/embed-url/{episode_id}",
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Extract and clean embed URL
@@ -182,8 +196,8 @@ class VideoSource:
final_params["h"] = "1"
# Construct the new query string and final URL
- new_query = urlencode(final_params) # Encode final_params into a query string
- new_url = m._replace(query=new_query) # Replace the old query string with the new one
- final_url = urlunparse(new_url) # Construct the final URL from the modified parts
+ new_query = urlencode(final_params)
+ new_url = m._replace(query=new_query)
+ final_url = urlunparse(new_url)
return final_url
diff --git a/Src/Api/animeunity/__init__.py b/Src/Api/animeunity/__init__.py
index 9502fd7..c003ab2 100644
--- a/Src/Api/animeunity/__init__.py
+++ b/Src/Api/animeunity/__init__.py
@@ -11,6 +11,7 @@ from .anime import download_film, download_series
# Variable
indice = 1
+_use_for = "anime"
_deprecate = False
diff --git a/Src/Api/animeunity/site.py b/Src/Api/animeunity/site.py
index 5d2bfa9..7c6c48c 100644
--- a/Src/Api/animeunity/site.py
+++ b/Src/Api/animeunity/site.py
@@ -10,6 +10,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -99,7 +101,9 @@ def title_search(title: str) -> int:
"""
# Get token and session value from configuration
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://www.{SITE_NAME}")
+
data = get_token(SITE_NAME, domain_to_use)
# Prepare cookies to be used in the request
@@ -121,8 +125,18 @@ def title_search(title: str) -> int:
}
# Send a POST request to the API endpoint for live search
- response = httpx.post(f'https://www.{SITE_NAME}.{domain_to_use}/livesearch', cookies=cookies, headers=headers, json=json_data)
- response.raise_for_status()
+ try:
+ response = httpx.post(
+ url=f'https://www.{SITE_NAME}.{domain_to_use}/livesearch',
+ cookies=cookies,
+ headers=headers,
+ json=json_data,
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Process each record returned in the response
for dict_title in response.json()['records']:
diff --git a/Src/Api/bitsearch/__init__.py b/Src/Api/bitsearch/__init__.py
index 2800430..722b409 100644
--- a/Src/Api/bitsearch/__init__.py
+++ b/Src/Api/bitsearch/__init__.py
@@ -11,6 +11,7 @@ from .title import download_title
# Variable
indice = 7
+_use_for = "film_serie"
_deprecate = False
diff --git a/Src/Api/bitsearch/site.py b/Src/Api/bitsearch/site.py
index e6e2307..ea62047 100644
--- a/Src/Api/bitsearch/site.py
+++ b/Src/Api/bitsearch/site.py
@@ -7,6 +7,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -34,11 +36,20 @@ def title_search(word_to_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Construct the full site URL and load the search page
- response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/search?q={unidecode(word_to_search)}&category=1&subcat=2&page=1", headers={'user-agent': get_headers()})
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain_to_use}/search?q={unidecode(word_to_search)}&category=1&subcat=2&page=1",
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/cb01/Player/maxstream.py b/Src/Api/cb01/Player/maxstream.py
index 6c454e6..3b58fb9 100644
--- a/Src/Api/cb01/Player/maxstream.py
+++ b/Src/Api/cb01/Player/maxstream.py
@@ -12,8 +12,12 @@ from bs4 import BeautifulSoup
# Internal utilities
from Src.Util.headers import get_headers
+from Src.Util._jsonConfig import config_manager
+# Variable
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
+
class VideoSource:
def __init__(self, url: str):
"""
@@ -35,7 +39,12 @@ class VideoSource:
try:
# Send a GET request to the initial URL
- response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=10)
+ response = httpx.get(
+ url=self.url,
+ headers=self.headers,
+ follow_redirects=True,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Extract the redirect URL from the HTML
@@ -60,7 +69,12 @@ class VideoSource:
try:
# Send a GET request to the redirect URL
- response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=10)
+ response = httpx.get(
+ url=self.redirect_url,
+ headers=self.headers,
+ follow_redirects=True,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Extract the Maxstream URL from the HTML
@@ -79,7 +93,7 @@ class VideoSource:
# Make request to stayonline api
data = {'id': self.redirect_url.split("/")[-2], 'ref': ''}
- response = httpx.post('https://stayonline.pro/ajax/linkEmbedView.php', headers=headers, data=data)
+ response = httpx.post('https://stayonline.pro/ajax/linkEmbedView.php', headers=headers, data=data, timeout=max_timeout)
response.raise_for_status()
uprot_url = response.json()['data']['value']
@@ -112,7 +126,12 @@ class VideoSource:
try:
# Send a GET request to the Maxstream URL
- response = httpx.get(self.maxstream_url, headers=self.headers, follow_redirects=True, timeout=10)
+ response = httpx.get(
+ url=self.maxstream_url,
+ headers=self.headers,
+ follow_redirects=True,
+ timeout=max_timeout
+ )
response.raise_for_status()
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/cb01/__init__.py b/Src/Api/cb01/__init__.py
index 5a75609..eb2e3f0 100644
--- a/Src/Api/cb01/__init__.py
+++ b/Src/Api/cb01/__init__.py
@@ -11,6 +11,7 @@ from .film import download_film
# Variable
indice = 9
+_use_for = "film"
_deprecate = False
diff --git a/Src/Api/cb01/site.py b/Src/Api/cb01/site.py
index a353f92..f19af88 100644
--- a/Src/Api/cb01/site.py
+++ b/Src/Api/cb01/site.py
@@ -7,6 +7,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -34,11 +36,21 @@ def title_search(word_to_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Send request to search for titles
- response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/?s={unidecode(word_to_search)}", headers={'user-agent': get_headers()}, follow_redirects=True)
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain_to_use}/?s={unidecode(word_to_search)}",
+ headers={'user-agent': get_headers()},
+ follow_redirects=True,
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/ddlstreamitaly/Player/ddl.py b/Src/Api/ddlstreamitaly/Player/ddl.py
index 9babc9e..50c4117 100644
--- a/Src/Api/ddlstreamitaly/Player/ddl.py
+++ b/Src/Api/ddlstreamitaly/Player/ddl.py
@@ -9,11 +9,13 @@ from bs4 import BeautifulSoup
# Internal utilities
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
# Variable
from ..costant import COOKIE
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
class VideoSource:
@@ -44,7 +46,12 @@ class VideoSource:
- str: The response content if successful, None otherwise.
"""
try:
- response = httpx.get(url, headers=self.headers, cookies=self.cookie)
+ response = httpx.get(
+ url=url,
+ headers=self.headers,
+ cookies=self.cookie,
+ timeout=max_timeout
+ )
response.raise_for_status()
return response.text
diff --git a/Src/Api/ddlstreamitaly/__init__.py b/Src/Api/ddlstreamitaly/__init__.py
index 7e10760..fd9d6fe 100644
--- a/Src/Api/ddlstreamitaly/__init__.py
+++ b/Src/Api/ddlstreamitaly/__init__.py
@@ -14,6 +14,7 @@ from .series import download_thread
# Variable
indice = 3
+_use_for = "serie"
_deprecate = False
diff --git a/Src/Api/ddlstreamitaly/site.py b/Src/Api/ddlstreamitaly/site.py
index 786e348..e8be602 100644
--- a/Src/Api/ddlstreamitaly/site.py
+++ b/Src/Api/ddlstreamitaly/site.py
@@ -11,6 +11,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -38,11 +40,20 @@ def title_search(word_to_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Send request to search for titles
- response = httpx.get(f"https://{SITE_NAME}.{domain_to_use}/search/?&q={unidecode(word_to_search)}&quick=1&type=videobox_video&nodes=11", headers={'user-agent': get_headers()})
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain_to_use}/search/?&q={unidecode(word_to_search)}&quick=1&type=videobox_video&nodes=11",
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/guardaserie/Player/supervideo.py b/Src/Api/guardaserie/Player/supervideo.py
index 9329fc1..7e51b06 100644
--- a/Src/Api/guardaserie/Player/supervideo.py
+++ b/Src/Api/guardaserie/Player/supervideo.py
@@ -11,9 +11,14 @@ from bs4 import BeautifulSoup
# Internal utilities
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
+# Variable
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
+
+
class VideoSource:
def __init__(self) -> None:
"""
@@ -45,7 +50,12 @@ class VideoSource:
"""
try:
- response = httpx.get(url, headers=self.headers, follow_redirects=True, timeout=10)
+ response = httpx.get(
+ url=url,
+ headers=self.headers,
+ follow_redirects=True,
+ timeout=max_timeout
+ )
response.raise_for_status()
return response.text
diff --git a/Src/Api/guardaserie/__init__.py b/Src/Api/guardaserie/__init__.py
index 04ff0d6..b078e0e 100644
--- a/Src/Api/guardaserie/__init__.py
+++ b/Src/Api/guardaserie/__init__.py
@@ -11,6 +11,7 @@ from .series import download_series
# Variable
indice = 4
+_use_for = "serie"
_deprecate = False
diff --git a/Src/Api/guardaserie/site.py b/Src/Api/guardaserie/site.py
index 3d5191a..a8c822c 100644
--- a/Src/Api/guardaserie/site.py
+++ b/Src/Api/guardaserie/site.py
@@ -7,6 +7,8 @@ from unidecode import unidecode
# Internal utilities
+from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -34,11 +36,20 @@ def title_search(word_to_search: str) -> int:
"""
# Find new domain if prev dont work
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
domain_to_use, _ = search_domain(SITE_NAME, f"https://{SITE_NAME}")
# Send request to search for titles
- response = httpx.get(f"https://guardaserie.{domain_to_use}/?story={unidecode(word_to_search)}&do=search&subaction=search", headers={'user-agent': get_headers()}, timeout=15)
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://guardaserie.{domain_to_use}/?story={unidecode(word_to_search)}&do=search&subaction=search",
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Create soup and find table
soup = BeautifulSoup(response.text, "html.parser")
diff --git a/Src/Api/mostraguarda/__init__.py b/Src/Api/mostraguarda/__init__.py
index ba075f0..4589acd 100644
--- a/Src/Api/mostraguarda/__init__.py
+++ b/Src/Api/mostraguarda/__init__.py
@@ -11,6 +11,7 @@ from .film import download_film
# Variable
indice = 9
+_use_for = "film"
_deprecate = False
diff --git a/Src/Api/mostraguarda/film.py b/Src/Api/mostraguarda/film.py
index f9a4b40..fdba5dd 100644
--- a/Src/Api/mostraguarda/film.py
+++ b/Src/Api/mostraguarda/film.py
@@ -2,6 +2,7 @@
import os
import sys
+import time
import logging
@@ -11,11 +12,12 @@ from bs4 import BeautifulSoup
# Internal utilities
+from Src.Util.console import console, msg
from Src.Util.message import start_message
-from Src.Util.console import console
-from Src.Util.os import can_create_file, remove_special_characters
+from Src.Util.call_stack import get_call_stack
from Src.Util.headers import get_headers
from Src.Lib.Downloader import HLS_Downloader
+from ..Template import execute_search
# Logic class
@@ -60,19 +62,17 @@ def download_film(movie_details: Json_film):
video_source.setup(supervideo_url)
# Define output path
- mp4_name = remove_special_characters(movie_details.title) + ".mp4"
- mp4_path = os.path.join(ROOT_PATH, SITE_NAME, MOVIE_FOLDER, remove_special_characters(movie_details.title))
-
- # Check if the MP4 file can be created
- if not can_create_file(mp4_name):
- logging.error("Invalid mp4 name.")
- sys.exit(0)
+ mp4_name = movie_details.title + ".mp4"
+ mp4_path = os.path.join(ROOT_PATH, SITE_NAME, MOVIE_FOLDER, movie_details.title)
# Get m3u8 master playlist
master_playlist = video_source.get_playlist()
# Download the film using the m3u8 playlist, and output filename
- HLS_Downloader(
- m3u8_playlist = master_playlist,
- output_filename = os.path.join(mp4_path, mp4_name)
- ).start()
+ if HLS_Downloader(m3u8_playlist = master_playlist, output_filename = os.path.join(mp4_path, mp4_name)).start() == 404:
+ time.sleep(2)
+
+ # Re call search function
+ if msg.ask("[green]Do you want to continue [white]([red]y[white])[green] or return at home[white]([red]n[white]) ", choices=['y', 'n'], default='y', show_choices=True) == "n":
+ frames = get_call_stack()
+ execute_search(frames[-4])
diff --git a/Src/Api/streamingcommunity/Core/Player/vixcloud.py b/Src/Api/streamingcommunity/Core/Player/vixcloud.py
index 79102db..025cc9d 100644
--- a/Src/Api/streamingcommunity/Core/Player/vixcloud.py
+++ b/Src/Api/streamingcommunity/Core/Player/vixcloud.py
@@ -11,6 +11,7 @@ from bs4 import BeautifulSoup
# Internal utilities
+from Src.Util._jsonConfig import config_manager
from Src.Util.headers import get_headers
from Src.Util.console import console, Panel
@@ -23,6 +24,7 @@ from ..Class.WindowType import WindowVideo, WindowParameter, DynamicJSONConverte
# Variable
from ...costant import SITE_NAME
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
class VideoSource:
@@ -66,7 +68,11 @@ class VideoSource:
try:
- response = httpx.get(f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}", headers=self.headers, timeout=15)
+ response = httpx.get(
+ url=f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}",
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Extract JSON response if available
@@ -90,7 +96,11 @@ class VideoSource:
try:
# Make a request to collect information about a specific season
- response = httpx.get(f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}', headers=self.headers, timeout=15)
+ response = httpx.get(
+ url=f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Extract JSON response if available
@@ -122,7 +132,11 @@ class VideoSource:
try:
# Make a request to get iframe source
- response = httpx.get(f"https://{self.base_name}.{self.domain}/iframe/{self.media_id}", params=params, timeout=15)
+ response = httpx.get(
+ url=f"https://{self.base_name}.{self.domain}/iframe/{self.media_id}",
+ params=params,
+ timeout=max_timeout
+ )
response.raise_for_status()
# Parse response with BeautifulSoup to get iframe source
@@ -164,7 +178,11 @@ class VideoSource:
# Make a request to get content
try:
- response = httpx.get(self.iframe_src, headers=self.headers, timeout=15)
+ response = httpx.get(
+ url=self.iframe_src,
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
except Exception as e:
@@ -172,14 +190,12 @@ class VideoSource:
console.print(Panel("[red bold]Coming soon", title="Notification", title_align="left", border_style="yellow"))
sys.exit(0)
- if response.status_code == 200:
+ # Parse response with BeautifulSoup to get content
+ soup = BeautifulSoup(response.text, "html.parser")
+ script = soup.find("body").find("script").text
- # Parse response with BeautifulSoup to get content
- soup = BeautifulSoup(response.text, "html.parser")
- script = soup.find("body").find("script").text
-
- # Parse script to get video information
- self.parse_script(script_text=script)
+ # Parse script to get video information
+ self.parse_script(script_text=script)
except Exception as e:
logging.error(f"Error getting content: {e}")
diff --git a/Src/Api/streamingcommunity/__init__.py b/Src/Api/streamingcommunity/__init__.py
index 7ab51c2..2843be6 100644
--- a/Src/Api/streamingcommunity/__init__.py
+++ b/Src/Api/streamingcommunity/__init__.py
@@ -12,6 +12,7 @@ from .series import download_series
# Variable
indice = 0
+_use_for = "film_serie"
_deprecate = False
diff --git a/Src/Api/streamingcommunity/site.py b/Src/Api/streamingcommunity/site.py
index f0317b7..afc19f7 100644
--- a/Src/Api/streamingcommunity/site.py
+++ b/Src/Api/streamingcommunity/site.py
@@ -12,8 +12,9 @@ from unidecode import unidecode
# Internal utilities
-from Src.Util.headers import get_headers
from Src.Util.console import console
+from Src.Util._jsonConfig import config_manager
+from Src.Util.headers import get_headers
from Src.Util.table import TVShowManager
from ..Template import search_domain, get_select_title
@@ -90,10 +91,20 @@ def title_search(title_search: str, domain: str) -> int:
Returns:
int: The number of titles found.
"""
+
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
# Send request to search for titles ( replace à to a and space to "+" )
- response = httpx.get(f"https://{SITE_NAME}.{domain}/api/search?q={unidecode(title_search.replace(' ', '+'))}", headers={'user-agent': get_headers()})
- response.raise_for_status()
+ try:
+ response = httpx.get(
+ url=f"https://{SITE_NAME}.{domain}/api/search?q={unidecode(title_search.replace(' ', '+'))}",
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout
+ )
+ response.raise_for_status()
+
+ except Exception as e:
+ console.print(f"Site: {SITE_NAME}, request search error: {e}")
# Add found titles to media search manager
for dict_title in response.json()['data']:
diff --git a/Src/Lib/Downloader/HLS/downloader.py b/Src/Lib/Downloader/HLS/downloader.py
index 9587065..da1db1a 100644
--- a/Src/Lib/Downloader/HLS/downloader.py
+++ b/Src/Lib/Downloader/HLS/downloader.py
@@ -55,6 +55,7 @@ FILTER_CUSTOM_REOLUTION = config_manager.get_int('M3U8_PARSER', 'force_resolutio
# Variable
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
m3u8_url_fixer = M3U8_UrlFix()
@@ -97,7 +98,7 @@ class HttpClient:
"""
self.headers = headers
- def get(self, url: str, timeout: int=20):
+ def get(self, url: str):
"""
Sends a GET request to the specified URL and returns the response as text.
@@ -105,7 +106,11 @@ class HttpClient:
str: The response body as text if the request is successful, None otherwise.
"""
try:
- response = httpx.get(url, headers=self.headers, timeout=timeout)
+ response = httpx.get(
+ url=url,
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
return response.text # Return the response text
@@ -114,7 +119,7 @@ class HttpClient:
logging.error(f"Request to {url} failed: {response.status_code} when get text.")
return 404
- def get_content(self, url, timeout=20):
+ def get_content(self, url):
"""
Sends a GET request to the specified URL and returns the raw response content.
@@ -122,7 +127,11 @@ class HttpClient:
bytes: The response content as bytes if the request is successful, None otherwise.
"""
try:
- response = httpx.get(url, headers=self.headers, timeout=timeout)
+ response = httpx.get(
+ url=url,
+ headers=self.headers,
+ timeout=max_timeout
+ )
response.raise_for_status()
return response.content # Return the raw response content
diff --git a/Src/Lib/Downloader/HLS/segments.py b/Src/Lib/Downloader/HLS/segments.py
index c0121ee..b940433 100644
--- a/Src/Lib/Downloader/HLS/segments.py
+++ b/Src/Lib/Downloader/HLS/segments.py
@@ -38,7 +38,6 @@ from .proxyes import main_test_proxy
# Config
TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
-REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
REQUEST_VERIFY = config_manager.get_bool('REQUESTS', 'verify_ssl')
THERE_IS_PROXY_LIST = check_file_existence("list_proxy.txt")
@@ -48,6 +47,7 @@ PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
# Variable
headers_index = config_manager.get_dict('REQUESTS', 'user-agent')
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
@@ -98,7 +98,11 @@ class M3U8_Segments:
# Make request to get porxy
try:
- response = httpx.get(key_uri, headers=headers_index)
+ response = httpx.get(
+ url=key_uri,
+ headers=headers_index,
+ timeout=max_timeout
+ )
response.raise_for_status()
except Exception as e:
@@ -214,16 +218,38 @@ class M3U8_Segments:
with httpx.Client(proxies=proxy, verify=need_verify) as client:
if 'key_base_url' in self.__dict__:
- response = client.get(ts_url, headers=random_headers(self.key_base_url), timeout=REQUEST_TIMEOUT, follow_redirects=True)
+ response = client.get(
+ url=ts_url,
+ headers=random_headers(self.key_base_url),
+ timeout=max_timeout,
+ follow_redirects=True
+ )
+
else:
- response = client.get(ts_url, headers={'user-agent': get_headers()}, timeout=REQUEST_TIMEOUT, follow_redirects=True)
+ response = client.get(
+ url=ts_url,
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout,
+ follow_redirects=True
+ )
else:
with httpx.Client(verify=need_verify) as client_2:
if 'key_base_url' in self.__dict__:
- response = client_2.get(ts_url, headers=random_headers(self.key_base_url), timeout=REQUEST_TIMEOUT, follow_redirects=True)
+ response = client_2.get(
+ url=ts_url,
+ headers=random_headers(self.key_base_url),
+ timeout=max_timeout,
+ follow_redirects=True
+ )
+
else:
- response = client_2.get(ts_url, headers={'user-agent': get_headers()}, timeout=REQUEST_TIMEOUT, follow_redirects=True)
+ response = client_2.get(
+ url=ts_url,
+ headers={'user-agent': get_headers()},
+ timeout=max_timeout,
+ follow_redirects=True
+ )
# Get response content
response.raise_for_status() # Raise exception for HTTP errors
diff --git a/Src/Lib/Downloader/MP4/downloader.py b/Src/Lib/Downloader/MP4/downloader.py
index 0070abe..f596a13 100644
--- a/Src/Lib/Downloader/MP4/downloader.py
+++ b/Src/Lib/Downloader/MP4/downloader.py
@@ -55,7 +55,7 @@ def MP4_downloader(url: str, path: str, referer: str = None, headers_: str = Non
# Make request to get content of video
with httpx.Client(verify=REQUEST_VERIFY, timeout=REQUEST_TIMEOUT) as client:
- with client.stream("GET", url, headers=headers, timeout=10) as response:
+ with client.stream("GET", url, headers=headers, timeout=REQUEST_TIMEOUT) as response:
total = int(response.headers.get('content-length', 0))
if total != 0:
diff --git a/config.json b/config.json
index e77a4fb..811a287 100644
--- a/config.json
+++ b/config.json
@@ -17,7 +17,7 @@
"show_trending": false
},
"REQUESTS": {
- "timeout": 10,
+ "timeout": 15,
"max_retry": 3,
"verify_ssl": true,
"user-agent": "",
diff --git a/run.py b/run.py
index d1063da..e01a1b9 100644
--- a/run.py
+++ b/run.py
@@ -42,7 +42,6 @@ def run_function(func: Callable[..., None], close_console: bool = False) -> None
def load_search_functions():
-
modules = []
loaded_functions = {}
@@ -53,10 +52,8 @@ def load_search_functions():
logging.info(f"Base folder path: {api_dir}")
logging.info(f"Api module path: {init_files}")
-
# Retrieve modules and their indices
for init_file in init_files:
-
# Get folder name as module name
module_name = os.path.basename(os.path.dirname(init_file))
logging.info(f"Load module name: {module_name}")
@@ -64,14 +61,13 @@ def load_search_functions():
try:
# Dynamically import the module
mod = importlib.import_module(f'Src.Api.{module_name}')
-
# Get 'indice' from the module
indice = getattr(mod, 'indice', 0)
is_deprecate = bool(getattr(mod, '_deprecate', True))
+ use_for = getattr(mod, '_use_for', 'other')
- # Add module and indice to the list
if not is_deprecate:
- modules.append((module_name, indice))
+ modules.append((module_name, indice, use_for))
except Exception as e:
console.print(f"[red]Failed to import module {module_name}: {str(e)}")
@@ -80,13 +76,14 @@ def load_search_functions():
modules.sort(key=lambda x: x[1])
# Load search functions in the sorted order
- for module_name, _ in modules:
+ for module_name, _, use_for in modules:
# Construct a unique alias for the module
module_alias = f'{module_name}_search'
logging.info(f"Module alias: {module_alias}")
try:
+
# Dynamically import the module
mod = importlib.import_module(f'Src.Api.{module_name}')
@@ -94,7 +91,7 @@ def load_search_functions():
search_function = getattr(mod, 'search')
# Add the function to the loaded functions dictionary
- loaded_functions[module_alias] = search_function
+ loaded_functions[module_alias] = (search_function, use_for)
except Exception as e:
console.print(f"[red]Failed to load search function from module {module_name}: {str(e)}")
@@ -149,17 +146,25 @@ def main():
# Create dynamic argument parser
parser = argparse.ArgumentParser(description='Script to download film and series from the internet.')
+ color_map = {
+ "anime": "red",
+ "film_serie": "yellow",
+ "film": "blue",
+ "serie": "green",
+ "other": "white"
+ }
+
# Add dynamic arguments based on loaded search modules
- for alias in search_functions.keys():
- short_option = alias[:3].upper() # Take the first three letters of the alias in uppercase
- long_option = alias # Use the full alias as the full option name
+ for alias, (_, use_for) in search_functions.items():
+ short_option = alias[:3].upper()
+ long_option = alias
parser.add_argument(f'-{short_option}', f'--{long_option}', action='store_true', help=f'Search for {alias.split("_")[0]} on streaming platforms.')
# Parse command line arguments
args = parser.parse_args()
# Mapping command-line arguments to functions
- arg_to_function = {alias: search_functions[alias] for alias in search_functions.keys()}
+ arg_to_function = {alias: func for alias, (func, _) in search_functions.items()}
# Check which argument is provided and run the corresponding function
for arg, func in arg_to_function.items():
@@ -168,14 +173,22 @@ def main():
return
# Mapping user input to functions
- input_to_function = {str(i): search_functions[alias] for i, alias in enumerate(search_functions.keys())}
+ input_to_function = {str(i): func for i, (alias, (func, _)) in enumerate(search_functions.items())}
# Create dynamic prompt message and choices
- choice_labels = {str(i): alias.split("_")[0].capitalize() for i, alias in enumerate(search_functions.keys())}
- prompt_message = f"[green]Insert category [white]({', '.join([f'[red]{key}: [magenta]{label}' for key, label in choice_labels.items()])}[white]): "
+ choice_labels = {str(i): (alias.split("_")[0].capitalize(), use_for) for i, (alias, (_, use_for)) in enumerate(search_functions.items())}
+
+ # Display the category legend in a single line
+ legend_text = " | ".join([f"[{color}]{category.capitalize()}[/{color}]" for category, color in color_map.items()])
+ console.print(f"[bold green]Category Legend:[/bold green] {legend_text}")
+
+ # Construct the prompt message with color-coded site names
+ prompt_message = "[green]Insert category [white](" + ", ".join(
+ [f"{key}: [{color_map[label[1]]}]{label[0]}[/{color_map[label[1]]}]" for key, label in choice_labels.items()]
+ ) + "[white])"
# Ask the user for input
- category = msg.ask(prompt_message, choices=list(choice_labels.keys()), default="0")
+ category = msg.ask(prompt_message, choices=list(choice_labels.keys()), default="0", show_choices=False, show_default=False)
# Run the corresponding function based on user input
if category in input_to_function: