core: Remove function "check_internet"

This commit is contained in:
Lovi 2025-02-26 17:07:45 +01:00
parent 45f39b36c3
commit 4a841692e7
6 changed files with 62 additions and 153 deletions

View File

@ -6,7 +6,7 @@ from typing import Tuple
# External library
from rich.console import Console
from rich.prompt import Prompt, Confirm
from rich.prompt import Prompt
# Internal utilities
@ -15,7 +15,7 @@ from StreamingCommunity.Lib.Downloader import HLS_Downloader
from StreamingCommunity.TelegramHelp.telegram_bot import TelegramSession, get_bot_instance
# Logic class
from .util.ScrapeSerie import ScrapeSerie
from .util.ScrapeSerie import GetSerieInfo
from StreamingCommunity.Api.Template.Util import (
manage_selection,
map_episode_title,
@ -37,7 +37,7 @@ msg = Prompt()
console = Console()
def download_video(index_season_selected: int, index_episode_selected: int, scrape_serie: ScrapeSerie, video_source: VideoSource) -> Tuple[str,bool]:
def download_video(index_season_selected: int, index_episode_selected: int, scrape_serie: GetSerieInfo, video_source: VideoSource) -> Tuple[str,bool]:
"""
Download a single episode video.
@ -94,7 +94,7 @@ def download_video(index_season_selected: int, index_episode_selected: int, scra
return r_proc['path'], r_proc['stopped']
def download_episode(index_season_selected: int, scrape_serie: ScrapeSerie, video_source: VideoSource, download_all: bool = False) -> None:
def download_episode(index_season_selected: int, scrape_serie: GetSerieInfo, video_source: VideoSource, download_all: bool = False) -> None:
"""
Download episodes of a selected season.
@ -156,7 +156,7 @@ def download_series(select_season: MediaItem) -> None:
start_message()
# Init class
scrape_serie = ScrapeSerie(site_constant.FULL_URL)
scrape_serie = GetSerieInfo(site_constant.FULL_URL)
video_source = VideoSource(site_constant.FULL_URL, True)
# Setup video source

View File

@ -19,7 +19,7 @@ from StreamingCommunity.Api.Player.Helper.Vixcloud.util import Season, EpisodeMa
max_timeout = config_manager.get_int("REQUESTS", "timeout")
class ScrapeSerie:
class GetSerieInfo:
def __init__(self, url):
"""
Initialize the ScrapeSerie class for scraping TV series information.

View File

@ -1,7 +1,6 @@
# 18.06.24
import ssl
import time
import certifi
from urllib.parse import urlparse, unquote
@ -18,6 +17,7 @@ from StreamingCommunity.Util.config_json import config_manager
# Variable
console = Console()
VERIFY = config_manager.get("REQUESTS", "verify")
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
def get_tld(url_str):
@ -54,16 +54,7 @@ def get_base_domain(url_str):
except Exception:
return None
def get_base_url(url_str):
"""Extract base URL including protocol and domain, removing path and query parameters."""
try:
parsed = urlparse(url_str)
return f"{parsed.scheme}://{parsed.netloc}"
except Exception:
return None
def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
def validate_url(url, base_url):
"""Validate if URL is accessible and matches expected base domain."""
console.print(f"\n[cyan]Starting validation for URL[white]: [yellow]{url}")
@ -75,58 +66,34 @@ def validate_url(url, base_url, max_timeout, max_retries=2, sleep=1):
console.print(f"[red]Domain structure mismatch: {url_domain} != {base_domain}")
return False, None
# Count dots to ensure we don't have extra subdomains
base_dots = base_url.count('.')
url_dots = url.count('.')
if url_dots > base_dots + 1:
console.print(f"[red]Too many subdomains in URL")
return False, None
client = httpx.Client(
verify=VERIFY,
headers=get_headers(),
timeout=max_timeout
timeout=MAX_TIMEOUT
)
for retry in range(max_retries):
try:
time.sleep(sleep)
# Initial check without redirects
response = client.get(url, follow_redirects=False)
if response.status_code == 403:
console.print(f"[red]Check failed (403) - Attempt {retry + 1}/{max_retries}")
continue
if response.status_code >= 400:
console.print(f"[red]Check failed: HTTP {response.status_code}")
return False, None
return True, None
except Exception as e:
console.print(f"[red]Connection error: {str(e)}")
time.sleep(sleep)
continue
return False, None
# Make request to web site
response = client.get(url, follow_redirects=False)
if response.status_code >= 400:
console.print(f"[red]Check failed: HTTP {response.status_code}")
console.print(f"[red]Response content: {response.text}")
return False, None
return True, base_domain
def search_domain(site_name: str, base_url: str, get_first: bool = False):
"""Search for valid domain matching site name and base URL."""
max_timeout = config_manager.get_int("REQUESTS", "timeout")
"""Search for valid domain matching site name and base URL."""
try:
is_correct, redirect_tld = validate_url(base_url, base_url, max_timeout)
is_correct, redirect_tld = validate_url(base_url, base_url)
if is_correct:
tld = redirect_tld or get_tld(base_url)
config_manager.configSite[site_name]['domain'] = tld
#console.print(f"[green]Successfully validated initial URL")
return tld, base_url
else:
return None, None
except Exception as e:
console.print(f"[red]Error testing initial URL: {str(e)}")
console.print(f"[red]Error testing initial URL: {str(e)}")
return None, None

View File

@ -153,7 +153,7 @@ class TheMovieDB:
# Join with colored arrows and print with proper category label
console.print(
f"[bold purple]{category}:[/] {' [red][/] '.join(colored_items)}"
f"[bold purple]{category}:[/] {' [red]->[/] '.join(colored_items)}"
)
def display_trending_tv_shows(self):

View File

@ -14,6 +14,8 @@ from rich.console import Console
# Variable
console = Console()
download_site_data = True
validate_github_config = True
class ConfigManager:
@ -35,16 +37,40 @@ class ConfigManager:
self.cache = {}
self.reference_config_url = 'https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/config.json'
# Validate and update config before proceeding
self._validate_and_update_config()
# Read initial config to get use_api setting
self._read_initial_config()
# Validate and update config before proceeding (if enabled)
if validate_github_config:
self._validate_and_update_config()
console.print(f"[bold cyan]ConfigManager initialized:[/bold cyan] [green]{self.file_path}[/green]")
def _read_initial_config(self) -> None:
"""Read initial configuration to get use_api setting."""
# NOTE(review): the diff rendering has stripped Python indentation from this
# hunk; block structure below follows the original source order.
# Loads just enough of the config file to learn the `use_api` flag before the
# rest of initialization runs; every failure path falls back to use_api=True.
try:
if os.path.exists(self.file_path):
with open(self.file_path, 'r') as f:
self.config = json.load(f)
# Missing DEFAULT section or missing key both default to True.
self.use_api = self.config.get('DEFAULT', {}).get('use_api', True)
console.print(f"[bold cyan]API usage setting:[/bold cyan] [{'green' if self.use_api else 'yellow'}]{self.use_api}[/{'green' if self.use_api else 'yellow'}]")
console.print(f"[bold cyan]Download site data:[/bold cyan] [{'green' if download_site_data else 'yellow'}]{download_site_data}[/{'green' if download_site_data else 'yellow'}]")
console.print(f"[bold cyan]Validate GitHub config:[/bold cyan] [{'green' if validate_github_config else 'yellow'}]{validate_github_config}[/{'green' if validate_github_config else 'yellow'}]")
else:
# No config file yet: use defaults rather than failing startup.
self.use_api = True
console.print("[bold yellow]Configuration file not found. Using default API setting: True[/bold yellow]")
console.print(f"[bold yellow]Download site data: {download_site_data}[/bold yellow]")
console.print(f"[bold yellow]Validate GitHub config: {validate_github_config}[/bold yellow]")
except Exception as e:
# Unreadable/invalid JSON also falls back to the safe default.
self.use_api = True
console.print("[bold red]Error reading API setting. Using default: True[/bold red]")
def _validate_and_update_config(self) -> None:
"""Validate local config against reference config and update missing keys."""
try:
# Load local config if exists
local_config = {}
if os.path.exists(self.file_path):
@ -56,7 +82,7 @@ class ConfigManager:
console.print(f"[bold cyan]Downloading reference config from:[/bold cyan] [green]{self.reference_config_url}[/green]")
response = requests.get(self.reference_config_url, timeout=10)
if response.ok:
if not response.ok:
raise Exception(f"Failed to download reference config. Status code: {response.status_code}")
reference_config = response.json()
@ -135,24 +161,6 @@ class ConfigManager:
return merged
def _read_initial_config(self) -> None:
"""Read initial configuration to get use_api setting."""
# NOTE(review): the diff rendering has stripped Python indentation from this
# hunk; block structure below follows the original source order.
# Reads only the `use_api` flag from the config file; any failure path
# (missing file, unreadable JSON) falls back to use_api=True.
try:
if os.path.exists(self.file_path):
with open(self.file_path, 'r') as f:
self.config = json.load(f)
# Missing DEFAULT section or missing key both default to True.
self.use_api = self.config.get('DEFAULT', {}).get('use_api', True)
console.print(f"[bold cyan]API usage setting:[/bold cyan] [{'green' if self.use_api else 'yellow'}]{self.use_api}[/{'green' if self.use_api else 'yellow'}]")
else:
# No config file yet: use the default rather than failing startup.
self.use_api = True
console.print("[bold yellow]Configuration file not found. Using default API setting: True[/bold yellow]")
except Exception as e:
# Unreadable/invalid JSON also falls back to the safe default.
self.use_api = True
console.print("[bold red]Error reading API setting. Using default: True[/bold red]")
def read_config(self) -> None:
"""Read the configuration file."""
try:
@ -174,8 +182,14 @@ class ConfigManager:
self.config = json.load(f)
console.print(f"[bold green]Configuration downloaded and saved:[/bold green] {len(self.config)} keys")
# Update site configuration separately
self.update_site_config()
# Read API setting again in case it was updated in the downloaded config
self.use_api = self.config.get('DEFAULT', {}).get('use_api', self.use_api)
# Update site configuration separately if enabled
if download_site_data:
self.update_site_config()
else:
console.print("[bold yellow]Site data download is disabled[/bold yellow]")
console.print("[bold cyan]Configuration processing complete[/bold cyan]")

View File

@ -287,17 +287,6 @@ class InternManager():
else:
return f"{bytes / (1024 * 1024):.2f} MB/s"
@staticmethod
def check_internet():
    """Block until an outbound HTTP request succeeds, i.e. the internet is reachable."""
    connected = False
    while not connected:
        try:
            # Any successful response (regardless of status) counts as connectivity.
            httpx.get("https://www.google.com", timeout=5)
            connected = True
        except Exception:
            console.log("[bold red]Internet is not available. Waiting...[/bold red]")
            time.sleep(2)
class OsSummary:
def __init__(self):
@ -346,32 +335,6 @@ class OsSummary:
except importlib.metadata.PackageNotFoundError:
return f"{lib_name}-not installed"
def download_requirements(self, url: str, filename: str):
    """
    Download the requirements.txt file from the specified URL if not found locally using requests.

    Args:
        url (str): The URL to download the requirements file from.
        filename (str): The local filename to save the requirements file as.

    Exits the process with a non-zero status when the download fails.
    """
    try:
        # Local import: requests is only needed on this fallback path.
        import requests

        logging.info(f"{filename} not found locally. Downloading from {url}...")
        # Bounded timeout so a hung download cannot stall startup indefinitely.
        response = requests.get(url, timeout=15)

        if response.ok:
            with open(filename, 'wb') as f:
                f.write(response.content)
        else:
            logging.error(f"Failed to download {filename}. HTTP Status code: {response.status_code}")
            # Fix: exit non-zero — status 0 would signal success to the caller.
            sys.exit(1)

    except Exception as e:
        logging.error(f"Failed to download {filename}: {e}")
        # Fix: exit non-zero on failure (was sys.exit(0)).
        sys.exit(1)
def install_library(self, lib_name: str):
"""
Install a Python library using pip.
@ -402,16 +365,6 @@ class OsSummary:
def get_system_summary(self):
self.check_python_version()
InternManager().check_internet()
# Python info
python_version = sys.version.split()[0]
python_implementation = platform.python_implementation()
arch = platform.machine()
os_info = platform.platform()
glibc_version = 'glibc ' + '.'.join(map(str, platform.libc_ver()[1]))
console.print(f"[cyan]Python: [bold red]{python_version} ({python_implementation} {arch}) - {os_info} ({glibc_version})[/bold red]")
# FFmpeg detection
binary_dir = self.get_binary_directory()
@ -457,31 +410,6 @@ class OsSummary:
console.print(f"[cyan]Path: [red]ffmpeg [bold yellow]'{self.ffmpeg_path}'[/bold yellow][white], [red]ffprobe '[bold yellow]{self.ffprobe_path}'[/bold yellow]")
# Handle requirements.txt
if not getattr(sys, 'frozen', False):
requirements_file = 'requirements.txt'
requirements_file = Path(__file__).parent.parent.parent / requirements_file
if not os.path.exists(requirements_file):
self.download_requirements(
'https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/requirements.txt',
requirements_file
)
optional_libraries = [line.strip().split("=")[0] for line in open(requirements_file, 'r', encoding='utf-8-sig')]
for lib in optional_libraries:
installed_version = self.get_library_version(lib.split("<")[0])
if 'not installed' in installed_version:
user_response = msg.ask(f"{lib} is not installed. Do you want to install it? (yes/no)", default="y")
if user_response.lower().strip() in ["yes", "y"]:
self.install_library(lib)
else:
logging.info(f"Library: {installed_version}")
logging.info(f"Libraries: {', '.join([self.get_library_version(lib) for lib in optional_libraries])}")
os_manager = OsManager()
internet_manager = InternManager()