mirror of https://github.com/Arrowar/StreamingCommunity.git
synced 2025-06-05 02:55:25 +00:00
core: Fix requirements
This commit is contained in:
parent 6efeb96201
commit 3cbabfb98b
@@ -5,9 +5,9 @@ import logging


# External libraries
import httpx
import jsbeautifier
from bs4 import BeautifulSoup
from curl_cffi import requests


# Internal utilities
@@ -28,7 +28,6 @@ class VideoSource:
- url (str): The URL of the video source.
"""
self.headers = get_headers()
self.client = httpx.Client()
self.url = url

def make_request(self, url: str) -> str:
@@ -42,8 +41,10 @@ class VideoSource:
- str: The response content if successful, None otherwise.
"""
try:
response = self.client.get(url, headers=self.headers, timeout=MAX_TIMEOUT, follow_redirects=True)
response.raise_for_status()
response = requests.get(url, headers=self.headers, timeout=MAX_TIMEOUT, impersonate="chrome110")
if response.status_code >= 400:
logging.error(f"Request failed with status code: {response.status_code}")
return None
return response.text

except Exception as e:
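For context, a minimal sketch of the curl_cffi call introduced here: impersonate="chrome110" makes the request carry a real Chrome TLS/HTTP fingerprint, which plain httpx does not. The fetch helper and the MAX_TIMEOUT value below are illustrative stand-ins, not the project's actual code.

from curl_cffi import requests

MAX_TIMEOUT = 10  # assumed value; the project defines its own constant

def fetch(url: str, headers: dict) -> str | None:
    # Impersonation mimics Chrome 110's TLS fingerprint to pass anti-bot checks
    response = requests.get(url, headers=headers, timeout=MAX_TIMEOUT, impersonate="chrome110")
    if response.status_code >= 400:
        return None
    return response.text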
@@ -39,6 +39,7 @@ class VideoSource:
self.is_series = is_series
self.media_id = media_id
self.iframe_src = None
self.window_parameter = None

def get_iframe(self, episode_id: int) -> None:
"""
@@ -109,41 +110,45 @@ class VideoSource:
# Parse script to get video information
self.parse_script(script_text=script)

except httpx.HTTPStatusError as e:
if e.response.status_code == 404:
console.print("[yellow]This content will be available soon![/yellow]")
return

logging.error(f"Error getting content: {e}")
raise

except Exception as e:
logging.error(f"Error getting content: {e}")
raise

def get_playlist(self) -> str:
def get_playlist(self) -> str | None:
"""
Generate authenticated playlist URL.

Returns:
str: Fully constructed playlist URL with authentication parameters
str | None: Fully constructed playlist URL with authentication parameters, or None if content unavailable
"""
if not self.window_parameter:
return None

params = {}

# Add 'h' parameter if video quality is 1080p
if self.canPlayFHD:
params['h'] = 1

# Parse the original URL
parsed_url = urlparse(self.window_parameter.url)
query_params = parse_qs(parsed_url.query)

# Check specifically for 'b=1' in the query parameters
if 'b' in query_params and query_params['b'] == ['1']:
params['b'] = 1

# Add authentication parameters (token and expiration)
params.update({
"token": self.window_parameter.token,
"expires": self.window_parameter.expires
})

# Build the updated query string
query_string = urlencode(params)

# Construct the new URL with updated query parameters
return urlunparse(parsed_url._replace(query=query_string))
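As a standalone illustration of the URL rebuild above (the function name and arguments are hypothetical; the logic mirrors the hunk):

from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

def build_playlist_url(base_url: str, token: str, expires: str, can_play_fhd: bool) -> str:
    parsed = urlparse(base_url)
    query = parse_qs(parsed.query)

    params = {}
    if can_play_fhd:
        params['h'] = 1                      # request the 1080p variant
    if query.get('b') == ['1']:
        params['b'] = 1                      # preserve the 'b=1' flag if present
    params.update({"token": token, "expires": expires})

    # Swap the original query string for the rebuilt one
    return urlunparse(parsed._replace(query=urlencode(params)))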
@@ -61,16 +61,22 @@ def download_film(select_title: MediaItem) -> str:
# Extract mostraguarda URL
try:
response = httpx.get(select_title.url, headers=get_headers(), timeout=10)
response.raise_for_status()

soup = BeautifulSoup(response.text, 'html.parser')
iframes = soup.find_all('iframe')
mostraguarda = iframes[0]['src']

except Exception as e:
console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get mostraguarda")
return None

# Extract supervideo URL
supervideo_url = None
try:
response = httpx.get(mostraguarda, headers=get_headers(), timeout=10)
response.raise_for_status()

soup = BeautifulSoup(response.text, 'html.parser')
pattern = r'//supervideo\.[^/]+/[a-z]/[a-zA-Z0-9]+'
supervideo_match = re.search(pattern, response.text)

@@ -78,7 +84,9 @@ def download_film(select_title: MediaItem) -> str:

except Exception as e:
console.print(f"[red]Site: {site_constant.SITE_NAME}, request error: {e}, get supervideo URL")

console.print("[yellow]This content will be available soon![/yellow]")
return None

# Init class
video_source = VideoSource(supervideo_url)
master_playlist = video_source.get_playlist()
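A small, self-contained sketch of how the supervideo regex above behaves; the sample HTML and the "https:" prefix are assumptions for illustration only:

import re

html = '<a href="https://supervideo.example/e/abc123XYZ">Supervideo</a>'  # hypothetical page snippet
pattern = r'//supervideo\.[^/]+/[a-z]/[a-zA-Z0-9]+'
match = re.search(pattern, html)

# The pattern captures a protocol-relative link; prefixing a scheme is one possible follow-up
supervideo_url = "https:" + match.group(0) if match else None
print(supervideo_url)  # https://supervideo.example/e/abc123XYZ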
@@ -38,38 +38,52 @@ class GetSerieInfo:
soup = BeautifulSoup(response.text, "html.parser")
self.series_name = soup.find("title").get_text(strip=True).split(" - ")[0]

# Process all seasons
season_items = soup.find_all('div', class_='accordion-item')

for season_idx, season_item in enumerate(season_items, 1):
season_header = season_item.find('div', class_='accordion-header')
if not season_header:
continue

season_name = season_header.get_text(strip=True)
# Find all season dropdowns
seasons_dropdown = soup.find('div', class_='dropdown seasons')
if not seasons_dropdown:
return

# Get all season items
season_items = seasons_dropdown.find_all('span', {'data-season': True})

for season_item in season_items:
season_num = int(season_item['data-season'])
season_name = season_item.get_text(strip=True)

# Create a new season and get a reference to it
# Create a new season
current_season = self.seasons_manager.add_season({
'number': season_idx,
'number': season_num,
'name': season_name
})

# Find episodes for this season
episode_divs = season_item.find_all('div', class_='down-episode')
for ep_idx, ep_div in enumerate(episode_divs, 1):
episode_name_tag = ep_div.find('b')
if not episode_name_tag:
# Find all episodes for this season
episodes_container = soup.find('div', {'class': 'dropdown mirrors', 'data-season': str(season_num)})
if not episodes_container:
continue

# Get all episode mirrors for this season
episode_mirrors = soup.find_all('div', {'class': 'dropdown mirrors',
'data-season': str(season_num)})

for mirror in episode_mirrors:
episode_data = mirror.get('data-episode', '').split('-')
if len(episode_data) != 2:
continue

episode_name = episode_name_tag.get_text(strip=True)
link_tag = ep_div.find('a', string=lambda text: text and "Supervideo" in text)
episode_url = link_tag['href'] if link_tag else None
ep_num = int(episode_data[1])

# Find supervideo link
supervideo_span = mirror.find('span', {'data-id': 'supervideo'})
if not supervideo_span:
continue

episode_url = supervideo_span.get('data-link', '')

# Add episode to the season
if current_season:
current_season.episodes.add({
'number': ep_idx,
'name': episode_name,
'number': ep_num,
'name': f"Episodio {ep_num}",
'url': episode_url
})
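A hedged sketch of the new season/episode scraping: seasons come from data-season spans and episodes from "dropdown mirrors" divs whose data-episode attribute holds a season-episode pair. The markup below is hypothetical, shaped only to match the selectors used in the hunk:

from bs4 import BeautifulSoup

# Hypothetical markup mirroring the selectors above; not taken from the real site
html = '''
<div class="dropdown seasons"><span data-season="1">Stagione 1</span></div>
<div class="dropdown mirrors" data-season="1" data-episode="1-3">
  <span data-id="supervideo" data-link="https://example.invalid/e/abc"></span>
</div>
'''
soup = BeautifulSoup(html, "html.parser")

for season in soup.find("div", class_="dropdown seasons").find_all("span", {"data-season": True}):
    season_num = int(season["data-season"])
    for mirror in soup.find_all("div", {"class": "dropdown mirrors", "data-season": str(season_num)}):
        ep_num = int(mirror.get("data-episode", "").split("-")[1])   # "1-3" -> episode 3
        link = mirror.find("span", {"data-id": "supervideo"}).get("data-link", "")
        print(season_num, ep_num, link)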
@@ -62,6 +62,10 @@ def download_film(select_title: MediaItem, proxy: str = None) -> str:
video_source.get_content()
master_playlist = video_source.get_playlist()

if master_playlist is None:
console.print(f"[red]Site: {site_constant.SITE_NAME}, error: No master playlist found[/red]")
return None

# Define the filename and path for the downloaded film
title_name = os_manager.get_sanitize_file(select_title.name) + ".mp4"
mp4_path = os.path.join(site_constant.MOVIE_FOLDER, title_name.replace(".mp4", ""))
@@ -79,7 +79,7 @@ def update():
try:
current_version = importlib.metadata.version(__title__)
except importlib.metadata.PackageNotFoundError:
console.print(f"[yellow]Warning: Could not determine installed version for '{__title__}' via importlib.metadata. Falling back to source version.[/yellow]")
#console.print(f"[yellow]Warning: Could not determine installed version for '{__title__}' via importlib.metadata. Falling back to source version.[/yellow]")
current_version = source_code_version

# Get commit details
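For reference, the fallback pattern used here in isolation (the helper name and arguments are illustrative):

import importlib.metadata

def resolve_version(package_name: str, source_code_version: str) -> str:
    # Prefer the installed package metadata; fall back to the version string
    # shipped in the source tree when the package is not pip-installed.
    try:
        return importlib.metadata.version(package_name)
    except importlib.metadata.PackageNotFoundError:
        return source_code_version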
@@ -6,6 +6,7 @@ m3u8
certifi
psutil
unidecode
curl_cffi
dnspython
jsbeautifier
pathvalidate
@@ -13,3 +14,4 @@ pycryptodomex
ua-generator
qbittorrent-api
pyTelegramBotAPI
beautifulsoup4