Connect capture_speed function to PID and resolve problem with back command

Lovi 2024-12-31 11:16:41 +01:00
parent 288976bfcb
commit a2ecdeb061
20 changed files with 109 additions and 66 deletions

View File

@@ -27,7 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -27,7 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -24,7 +24,7 @@ from .costant import SITE_NAME
 def search(string_to_search: str = None, get_onylDatabase: bool = False):
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -27,7 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -28,9 +28,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        # Make request to site to get content that corrsisponde to that string
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -25,7 +25,6 @@ from StreamingCommunity.Api.Player.ddl import VideoSource
 # Variable
 from .costant import ROOT_PATH, SERIES_FOLDER
-table_show_manager = TVShowManager()

@@ -120,6 +119,7 @@ def display_episodes_list(obj_episode_manager) -> str:
     """

     # Set up table for displaying episodes
+    table_show_manager = TVShowManager()
     table_show_manager.set_slice_end(10)

     # Add columns to the table
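
The second hunk is the back-command side of the fix: TVShowManager is now constructed inside display_episodes_list instead of being shared at module level, so each listing presumably starts from a fresh table rather than one still holding state from a previous navigation. A generic sketch of that pattern, using a stand-in class rather than the project's TVShowManager:

# Illustrative only: "Table" stands in for a stateful display helper such as TVShowManager.
class Table:
    def __init__(self):
        self.rows = []

def display_list(items):
    # A fresh instance per call means no rows or slice offsets linger
    # from an earlier invocation (e.g. after navigating "back").
    table = Table()
    table.rows.extend(items)
    return table.rows

# display_list(["ep1", "ep2"]) always returns exactly the items passed in.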

View File

@@ -27,9 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        # Make request to site to get content that corrsisponde to that string
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = title_search(quote_plus(string_to_search))

View File

@@ -25,7 +25,6 @@ from StreamingCommunity.Api.Player.supervideo import VideoSource
 # Variable
 from .costant import ROOT_PATH, SERIES_FOLDER
-table_show_manager = TVShowManager()

@@ -171,8 +170,9 @@ def display_episodes_list(obj_episode_manager) -> str:
     Returns:
         last_command (str): Last command entered by the user.
     """

     # Set up table for displaying episodes
+    table_show_manager = TVShowManager()
     table_show_manager.set_slice_end(10)

     # Add columns to the table

View File

@@ -28,7 +28,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Search on database
     len_database = asyncio.run(title_search(quote_plus(string_to_search)))

View File

@@ -27,7 +27,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Not available for the moment
     if get_onylDatabase:

View File

@@ -28,7 +28,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
     if string_to_search is None:
-        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [green]{SITE_NAME}").strip()

     # Get site domain and version and get result of the search
     site_version, domain = get_version_and_domain()

View File

@@ -25,7 +25,6 @@ from StreamingCommunity.Api.Player.vixcloud import VideoSource
 # Variable
 from .costant import ROOT_PATH, SITE_NAME, SERIES_FOLDER
-table_show_manager = TVShowManager()

@@ -178,6 +177,7 @@ def display_episodes_list(scrape_serie) -> str:
     """

     # Set up table for displaying episodes
+    table_show_manager = TVShowManager()
     table_show_manager.set_slice_end(10)

     # Add columns to the table

View File

@@ -176,9 +176,11 @@ class ContentExtractor:
             set_language = DOWNLOAD_SPECIFIC_AUDIO
             downloadable_languages = list(set(available_languages) & set(set_language))

-            console.print(f"[cyan bold]Audio →[/cyan bold] [green]Available:[/green] [purple]{', '.join(available_languages)}[/purple] | "
-                          f"[red]Set:[/red] [purple]{', '.join(set_language)}[/purple] | "
-                          f"[yellow]Downloadable:[/yellow] [purple]{', '.join(downloadable_languages)}[/purple]")
+            # Only show if there is something available
+            if len(available_languages) > 0:
+                console.print(f"[cyan bold]Audio →[/cyan bold] [green]Available:[/green] [purple]{', '.join(available_languages)}[/purple] | "
+                              f"[red]Set:[/red] [purple]{', '.join(set_language)}[/purple] | "
+                              f"[yellow]Downloadable:[/yellow] [purple]{', '.join(downloadable_languages)}[/purple]")

         else:
             console.log("[red]Can't find a list of audios")
@@ -200,9 +202,11 @@ class ContentExtractor:
             set_language = DOWNLOAD_SPECIFIC_SUBTITLE
             downloadable_languages = list(set(available_languages) & set(set_language))

-            console.print(f"[cyan bold]Subtitle →[/cyan bold] [green]Available:[/green] [purple]{', '.join(available_languages)}[/purple] | "
-                          f"[red]Set:[/red] [purple]{', '.join(set_language)}[/purple] | "
-                          f"[yellow]Downloadable:[/yellow] [purple]{', '.join(downloadable_languages)}[/purple]")
+            # Only show if there is something available
+            if len(available_languages) > 0:
+                console.print(f"[cyan bold]Subtitle →[/cyan bold] [green]Available:[/green] [purple]{', '.join(available_languages)}[/purple] | "
+                              f"[red]Set:[/red] [purple]{', '.join(set_language)}[/purple] | "
+                              f"[yellow]Downloadable:[/yellow] [purple]{', '.join(downloadable_languages)}[/purple]")

         else:
             console.log("[red]Can't find a list of subtitles")
@@ -212,13 +216,18 @@ class ContentExtractor:
         It identifies the best video quality and displays relevant information to the user.
         """
         logging.info(f"class 'ContentExtractor'; call _collect_video()")
+        set_resolution = "Best"

         # Collect custom quality video if a specific resolution is set
         if FILTER_CUSTOM_REOLUTION != -1:
             self.m3u8_index, video_res = self.obj_parse._video.get_custom_uri(y_resolution=FILTER_CUSTOM_REOLUTION)
+            set_resolution = f"{FILTER_CUSTOM_REOLUTION}p"

-        # Otherwise, get the best available video quality
-        self.m3u8_index, video_res = self.obj_parse._video.get_best_uri()
+        else:
+            # Otherwise, get the best available video quality
+            self.m3u8_index, video_res = self.obj_parse._video.get_best_uri()

         self.codec: M3U8_Codec = self.obj_parse.codec

         # List all available resolutions
@@ -227,19 +236,34 @@ class ContentExtractor:
         logging.info(f"M3U8 index selected: {self.m3u8_index}, with resolution: {video_res}")

         # Create a formatted table to display video info
-        console.print(f"[cyan bold]Video →[/cyan bold] [green]Available resolutions:[/green] [purple]{', '.join(list_available_resolution)}[/purple] | "
-                      f"[yellow]Downloadable:[/yellow] [purple]{video_res[0]}x{video_res[1]}[/purple]")
+        console.print(f"[cyan bold]Video →[/cyan bold] [green]Available:[/green] [purple]{', '.join(list_available_resolution)}[/purple] | "
+                      f"[red]Set:[/red] [purple]{set_resolution}[/purple] | "
+                      f"[yellow]Downloadable:[/yellow] [purple]{video_res[0]}x{video_res[1]}[/purple]")

         if self.codec is not None:
+            # Generate the string for available codec information
+            available_codec_info = (
+                f"[green]v[/green]: [yellow]{self.codec.video_codec_name}[/yellow] "
+                f"([green]b[/green]: [yellow]{self.codec.video_bitrate // 1000}k[/yellow]), "
+                f"[green]a[/green]: [yellow]{self.codec.audio_codec_name}[/yellow] "
+                f"([green]b[/green]: [yellow]{self.codec.audio_bitrate // 1000}k[/yellow])"
+            )
+
+            # Determine what to display for "Set"
+            # If the codec usage is enabled in the configuration, use the detailed codec info
+            # Otherwise, display "copy"
             if config_manager.get_bool("M3U8_CONVERSION", "use_codec"):
-                codec_info = (f"[green]v[/green]: [yellow]{self.codec.video_codec_name}[/yellow] "
-                              f"([green]b[/green]: [yellow]{self.codec.video_bitrate // 1000}k[/yellow]), "
-                              f"[green]a[/green]: [yellow]{self.codec.audio_codec_name}[/yellow] "
-                              f"([green]b[/green]: [yellow]{self.codec.audio_bitrate // 1000}k[/yellow])")
+                set_codec_info = available_codec_info
             else:
-                codec_info = "[cyan]copy[/cyan]"
-            console.print(f"[bold cyan]Codec →[/bold cyan] {codec_info}")
+                set_codec_info = "[purple]copy[/purple]"
+
+            # Print the formatted result with "Available" and "Set" information
+            console.print(
+                f"[bold cyan]Codec →[/bold cyan] [green]Available:[/green] {available_codec_info} | "
+                f"[red]Set:[/red] {set_codec_info}"
+            )

         # Fix the URL if it does not include the full protocol
         if "http" not in self.m3u8_index:

View File

@@ -544,11 +544,14 @@ class M3U8_Segments:
         file_size = os.path.getsize(self.tmp_file_path)
         if file_size == 0:
             raise Exception("Output file is empty")

-        # Get expected time
-        ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
-        ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
-        console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
+        # Display additional info only if there is failed segments
+        if self.info_nFailed > 0:
+            # Get expected time
+            ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
+            ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
+            console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")

         if self.info_nRetry >= len(self.segments) * (1/3.33):
             console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")

View File

@@ -17,7 +17,7 @@ crypto_installed = crypto_spec is not None
 if crypto_installed:
-    console.print("[cyan]Decrypy use: Cryptodomex")
+    logging.info("[cyan]Decrypy use: Cryptodomex")
     from Cryptodome.Cipher import AES
     from Cryptodome.Util.Padding import unpad

@@ -93,7 +93,7 @@ else:
     # Check if openssl command is available
     try:
         openssl_available = subprocess.run(["openssl", "version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode == 0
-        console.print("[cyan]Decrypy use: OPENSSL")
+        logging.info("[cyan]Decrypy use: OPENSSL")
     except:
         openssl_available = False

View File

@@ -56,43 +56,69 @@ class M3U8_Ts_Estimator:
             self.ts_file_sizes.append(size)
             self.now_downloaded_size += size_download

-    def capture_speed(self, interval: float = 1):
+    def capture_speed(self, interval: float = 1, pid: int = None):
         """
-        Capture the internet speed periodically and store the values.
+        Capture the internet speed periodically for a specific process (identified by PID)
+        or the entire system if no PID is provided.
         """
-        def get_network_io():
-            """Get network I/O counters, handle missing psutil gracefully."""
+        def get_network_io(process=None):
+            """
+            Get network I/O counters for a specific process or system-wide if no process is specified.
+            """
             try:
-                io_counters = psutil.net_io_counters()
-                return io_counters
+                if process:
+                    io_counters = process.io_counters()
+                    return io_counters
+                else:
+                    io_counters = psutil.net_io_counters()
+                    return io_counters
             except Exception as e:
                 logging.warning(f"Unable to access network I/O counters: {e}")
                 return None

-        while True:
-            old_value = get_network_io()
+        # If a PID is provided, attempt to attach to the corresponding process
+        process = None
+        if pid is not None:
+            try:
+                process = psutil.Process(pid)
+            except psutil.NoSuchProcess:
+                logging.error(f"Process with PID {pid} does not exist.")
+                return
+            except Exception as e:
+                logging.error(f"Failed to attach to process with PID {pid}: {e}")
+                return

-            if old_value is None: # If psutil is not available, continue with default values
+        while True:
+            old_value = get_network_io(process)
+
+            if old_value is None: # If psutil fails, continue with the next interval
                 time.sleep(interval)
                 continue

             time.sleep(interval)
-            new_value = get_network_io()
+            new_value = get_network_io(process)

             if new_value is None: # Handle again if psutil fails in the next call
                 time.sleep(interval)
                 continue

             with self.lock:
-                upload_speed = (new_value.bytes_sent - old_value.bytes_sent) / interval
-                download_speed = (new_value.bytes_recv - old_value.bytes_recv) / interval
+                # Calculate speed based on process-specific counters if process is specified
+                if process:
+                    upload_speed = (new_value.write_bytes - old_value.write_bytes) / interval
+                    download_speed = (new_value.read_bytes - old_value.read_bytes) / interval
+                else:
+                    # System-wide counters
+                    upload_speed = (new_value.bytes_sent - old_value.bytes_sent) / interval
+                    download_speed = (new_value.bytes_recv - old_value.bytes_recv) / interval

                 self.speed = {
                     "upload": internet_manager.format_transfer_speed(upload_speed),
                     "download": internet_manager.format_transfer_speed(download_speed)
                 }

     def get_average_speed(self) -> float:
         """
         Calculate the average internet speed.
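
The rewritten capture_speed measures per-process I/O when a PID is given, via psutil.Process(pid).io_counters(), whose read_bytes/write_bytes are cumulative and, on Linux, reflect disk rather than pure network traffic (per-process counters are also unavailable on some platforms such as macOS); it falls back to the system-wide psutil.net_io_counters() otherwise. A minimal single-sample sketch of the same idea outside the class, with an illustrative function name:

import time
from typing import Optional

import psutil

def sample_io_rate(pid: Optional[int] = None, interval: float = 1.0) -> dict:
    """Approximate download/upload bytes per second over one interval."""
    proc = psutil.Process(pid) if pid is not None else None

    def counters():
        # Per-process read/write counters, or system-wide network counters
        return proc.io_counters() if proc else psutil.net_io_counters()

    old = counters()
    time.sleep(interval)
    new = counters()

    if proc:
        download = (new.read_bytes - old.read_bytes) / interval
        upload = (new.write_bytes - old.write_bytes) / interval
    else:
        download = (new.bytes_recv - old.bytes_recv) / interval
        upload = (new.bytes_sent - old.bytes_sent) / interval

    return {"download_Bps": download, "upload_Bps": upload}

# sample_io_rate() samples the whole system; sample_io_rate(pid=1234) samples one process.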

View File

@@ -57,11 +57,10 @@ def update():
     # Check installed version
     if str(__version__).replace('v', '') != str(last_version).replace('v', '') :
-        console.print(f"[red]New version available: [yellow]{last_version}")
+        console.print(f"[red]New version available: [yellow]{last_version} \n")
     else:
-        console.print(f"[red]Everything is up to date")
+        console.print(f" [yellow]Everything is up to date \n")

-    console.print("\n")
     console.print(f"[red]{__title__} has been downloaded [yellow]{total_download_count} [red]times, but only [yellow]{percentual_stars}% [red]of users have starred it.\n\
 [cyan]Help the repository grow today by leaving a [yellow]star [cyan]and [yellow]sharing [cyan]it with others online!")

View File

@@ -317,16 +317,13 @@ class InternManager():
 def check_internet():
     while True:
         try:
-            httpx.get("https://www.google.com")
-            #console.log("[bold green]Internet is available![/bold green]")
+            httpx.get("https://www.google.com", timeout=15)
             break

         except urllib.error.URLError:
             console.log("[bold red]Internet is not available. Waiting...[/bold red]")
             time.sleep(5)

     print()

 class OsSummary:
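
The added timeout=15 keeps the probe from hanging, but note that urllib.error.URLError is never raised by httpx, so a failed request would propagate out of this loop. A sketch of the same check that catches httpx's own exception family instead (assuming httpx and rich are available, as in the project):

import time

import httpx
from rich.console import Console

console = Console()

def wait_for_internet(url: str = "https://www.google.com", timeout: float = 15, retry_delay: float = 5) -> None:
    """Block until an HTTP request to the given URL succeeds."""
    while True:
        try:
            httpx.get(url, timeout=timeout)
            break
        except httpx.RequestError:
            # Covers connection errors and timeouts raised by httpx
            console.log("[bold red]Internet is not available. Waiting...[/bold red]")
            time.sleep(retry_delay)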

View File

@@ -118,11 +118,10 @@ def initialize():
         sys.exit(0)

     # Attempting GitHub update
-    try:
+    """try:
         git_update()
         print()
     except:
-        console.log("[red]Error with loading github.")
+        console.log("[red]Error with loading github.")"""

 def main():

View File

@@ -1,5 +1,4 @@
httpx
cffi
bs4
rich
tqdm