core: Fix requirements

This commit is contained in:
Lovi 2025-06-02 18:14:36 +02:00
parent 6efeb96201
commit 3cbabfb98b
7 changed files with 70 additions and 36 deletions

View File

@@ -5,9 +5,9 @@ import logging
# External libraries # External libraries
import httpx
import jsbeautifier import jsbeautifier
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from curl_cffi import requests
# Internal utilities # Internal utilities
@@ -28,7 +28,6 @@ class VideoSource:
- url (str): The URL of the video source. - url (str): The URL of the video source.
""" """
self.headers = get_headers() self.headers = get_headers()
self.client = httpx.Client()
self.url = url self.url = url
def make_request(self, url: str) -> str: def make_request(self, url: str) -> str:
@@ -42,8 +41,10 @@ class VideoSource:
- str: The response content if successful, None otherwise. - str: The response content if successful, None otherwise.
""" """
try: try:
response = self.client.get(url, headers=self.headers, timeout=MAX_TIMEOUT, follow_redirects=True) response = requests.get(url, headers=self.headers, timeout=MAX_TIMEOUT, impersonate="chrome110")
response.raise_for_status() if response.status_code >= 400:
logging.error(f"Request failed with status code: {response.status_code}")
return None
return response.text return response.text
except Exception as e: except Exception as e:

View File

@@ -39,6 +39,7 @@ class VideoSource:
self.is_series = is_series self.is_series = is_series
self.media_id = media_id self.media_id = media_id
self.iframe_src = None self.iframe_src = None
self.window_parameter = None
def get_iframe(self, episode_id: int) -> None: def get_iframe(self, episode_id: int) -> None:
""" """
@@ -109,41 +110,45 @@ class VideoSource:
# Parse script to get video information # Parse script to get video information
self.parse_script(script_text=script) self.parse_script(script_text=script)
except httpx.HTTPStatusError as e:
if e.response.status_code == 404:
console.print("[yellow]This content will be available soon![/yellow]")
return
logging.error(f"Error getting content: {e}")
raise
except Exception as e: except Exception as e:
logging.error(f"Error getting content: {e}") logging.error(f"Error getting content: {e}")
raise raise
def get_playlist(self) -> str: def get_playlist(self) -> str | None:
""" """
Generate authenticated playlist URL. Generate authenticated playlist URL.
Returns: Returns:
str: Fully constructed playlist URL with authentication parameters str | None: Fully constructed playlist URL with authentication parameters, or None if content unavailable
""" """
if not self.window_parameter:
return None
params = {} params = {}
# Add 'h' parameter if video quality is 1080p
if self.canPlayFHD: if self.canPlayFHD:
params['h'] = 1 params['h'] = 1
# Parse the original URL
parsed_url = urlparse(self.window_parameter.url) parsed_url = urlparse(self.window_parameter.url)
query_params = parse_qs(parsed_url.query) query_params = parse_qs(parsed_url.query)
# Check specifically for 'b=1' in the query parameters
if 'b' in query_params and query_params['b'] == ['1']: if 'b' in query_params and query_params['b'] == ['1']:
params['b'] = 1 params['b'] = 1
# Add authentication parameters (token and expiration)
params.update({ params.update({
"token": self.window_parameter.token, "token": self.window_parameter.token,
"expires": self.window_parameter.expires "expires": self.window_parameter.expires
}) })
# Build the updated query string
query_string = urlencode(params) query_string = urlencode(params)
# Construct the new URL with updated query parameters
return urlunparse(parsed_url._replace(query=query_string)) return urlunparse(parsed_url._replace(query=query_string))

View File

@@ -61,16 +61,22 @@ def download_film(select_title: MediaItem) -> str:
# Extract mostraguarda URL # Extract mostraguarda URL
try: try:
response = httpx.get(select_title.url, headers=get_headers(), timeout=10) response = httpx.get(select_title.url, headers=get_headers(), timeout=10)
response.raise_for_status()
soup = BeautifulSoup(response.text, 'html.parser') soup = BeautifulSoup(response.text, 'html.parser')
iframes = soup.find_all('iframe') iframes = soup.find_all('iframe')
mostraguarda = iframes[0]['src'] mostraguarda = iframes[0]['src']
except Exception as e: except Exception as e:
console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get mostraguarda") console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get mostraguarda")
return None
# Extract supervideo URL # Extract supervideo URL
supervideo_url = None
try: try:
response = httpx.get(mostraguarda, headers=get_headers(), timeout=10) response = httpx.get(mostraguarda, headers=get_headers(), timeout=10)
response.raise_for_status()
soup = BeautifulSoup(response.text, 'html.parser') soup = BeautifulSoup(response.text, 'html.parser')
pattern = r'//supervideo\.[^/]+/[a-z]/[a-zA-Z0-9]+' pattern = r'//supervideo\.[^/]+/[a-z]/[a-zA-Z0-9]+'
supervideo_match = re.search(pattern, response.text) supervideo_match = re.search(pattern, response.text)
@@ -78,7 +84,9 @@ def download_film(select_title: MediaItem) -> str:
except Exception as e: except Exception as e:
console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get supervideo URL") console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get supervideo URL")
console.print("[yellow]This content will be available soon![/yellow]")
return None
# Init class # Init class
video_source = VideoSource(supervideo_url) video_source = VideoSource(supervideo_url)
master_playlist = video_source.get_playlist() master_playlist = video_source.get_playlist()

View File

@@ -38,38 +38,52 @@ class GetSerieInfo:
soup = BeautifulSoup(response.text, "html.parser") soup = BeautifulSoup(response.text, "html.parser")
self.series_name = soup.find("title").get_text(strip=True).split(" - ")[0] self.series_name = soup.find("title").get_text(strip=True).split(" - ")[0]
# Process all seasons # Find all season dropdowns
season_items = soup.find_all('div', class_='accordion-item') seasons_dropdown = soup.find('div', class_='dropdown seasons')
if not seasons_dropdown:
for season_idx, season_item in enumerate(season_items, 1): return
season_header = season_item.find('div', class_='accordion-header')
if not season_header: # Get all season items
continue season_items = seasons_dropdown.find_all('span', {'data-season': True})
season_name = season_header.get_text(strip=True) for season_item in season_items:
season_num = int(season_item['data-season'])
season_name = season_item.get_text(strip=True)
# Create a new season and get a reference to it # Create a new season
current_season = self.seasons_manager.add_season({ current_season = self.seasons_manager.add_season({
'number': season_idx, 'number': season_num,
'name': season_name 'name': season_name
}) })
# Find episodes for this season # Find all episodes for this season
episode_divs = season_item.find_all('div', class_='down-episode') episodes_container = soup.find('div', {'class': 'dropdown mirrors', 'data-season': str(season_num)})
for ep_idx, ep_div in enumerate(episode_divs, 1): if not episodes_container:
episode_name_tag = ep_div.find('b') continue
if not episode_name_tag:
# Get all episode mirrors for this season
episode_mirrors = soup.find_all('div', {'class': 'dropdown mirrors',
'data-season': str(season_num)})
for mirror in episode_mirrors:
episode_data = mirror.get('data-episode', '').split('-')
if len(episode_data) != 2:
continue continue
episode_name = episode_name_tag.get_text(strip=True) ep_num = int(episode_data[1])
link_tag = ep_div.find('a', string=lambda text: text and "Supervideo" in text)
episode_url = link_tag['href'] if link_tag else None # Find supervideo link
supervideo_span = mirror.find('span', {'data-id': 'supervideo'})
if not supervideo_span:
continue
episode_url = supervideo_span.get('data-link', '')
# Add episode to the season # Add episode to the season
if current_season: if current_season:
current_season.episodes.add({ current_season.episodes.add({
'number': ep_idx, 'number': ep_num,
'name': episode_name, 'name': f"Episodio {ep_num}",
'url': episode_url 'url': episode_url
}) })

View File

@@ -62,6 +62,10 @@ def download_film(select_title: MediaItem, proxy: str = None) -> str:
video_source.get_content() video_source.get_content()
master_playlist = video_source.get_playlist() master_playlist = video_source.get_playlist()
if master_playlist is None:
console.print(f"[red]Site: {site_constant.SITE_NAME}, error: No master playlist found[/red]")
return None
# Define the filename and path for the downloaded film # Define the filename and path for the downloaded film
title_name = os_manager.get_sanitize_file(select_title.name) + ".mp4" title_name = os_manager.get_sanitize_file(select_title.name) + ".mp4"
mp4_path = os.path.join(site_constant.MOVIE_FOLDER, title_name.replace(".mp4", "")) mp4_path = os.path.join(site_constant.MOVIE_FOLDER, title_name.replace(".mp4", ""))

View File

@@ -79,7 +79,7 @@ def update():
try: try:
current_version = importlib.metadata.version(__title__) current_version = importlib.metadata.version(__title__)
except importlib.metadata.PackageNotFoundError: except importlib.metadata.PackageNotFoundError:
console.print(f"[yellow]Warning: Could not determine installed version for '{__title__}' via importlib.metadata. Falling back to source version.[/yellow]") #console.print(f"[yellow]Warning: Could not determine installed version for '{__title__}' via importlib.metadata. Falling back to source version.[/yellow]")
current_version = source_code_version current_version = source_code_version
# Get commit details # Get commit details

View File

@@ -6,6 +6,7 @@ m3u8
certifi certifi
psutil psutil
unidecode unidecode
curl_cffi
dnspython dnspython
jsbeautifier jsbeautifier
pathvalidate pathvalidate
@@ -13,3 +14,4 @@ pycryptodomex
ua-generator ua-generator
qbittorrent-api qbittorrent-api
pyTelegramBotAPI pyTelegramBotAPI
beautifulsoup4