mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-07 12:05:35 +00:00)
Fix table and trending show.
This commit is contained in:
parent 0f31abd946
commit 3f983f334f
@@ -1,3 +0,0 @@
-# 19.04.24
-
-from .page import search
@@ -1,127 +0,0 @@
-# 19.04.24
-
-import time
-import logging
-from urllib.parse import quote_plus, urlparse, parse_qs
-
-from typing import Generator, Optional
-
-
-# External libraries
-import httpx
-from bs4 import BeautifulSoup
-
-
-def filter_result(link: str) -> Optional[str]:
-    """
-    Filters search result links to remove unwanted ones.
-
-    Parameters:
-        - link (str): The URL of the search result.
-
-    Returns:
-        Optional[str]: The filtered URL if valid, None otherwise.
-    """
-    try:
-        logging.info(f"Filter url: {link}")
-        if link.startswith('/url?'):
-
-            # Extract the actual URL from Google's redirect link
-            o = urlparse(link, 'http')
-            link = parse_qs(o.query)['q'][0]
-
-        o = urlparse(link, 'http')
-
-        # Filter out Google links
-        if o.netloc and 'google' not in o.netloc:
-            return link
-
-    except Exception:
-        pass
-
-
-def search(query: str, num: int = 10, stop: Optional[int] = None, pause: float = 2.0) -> Generator[str, None, None]:
-    """
-    Performs a Google search and yields the URLs of search results.
-
-    Parameters:
-        - query (str): The search query.
-        - num (int): Number of results to fetch per request. Default is 10.
-        - stop (int, optional): Total number of results to retrieve. Default is None.
-        - pause (float): Pause duration between requests. Default is 2.0.
-
-    Yields:
-        str: The URL of a search result.
-
-    Example:
-        >>> for url in search("Python tutorials", num=5, stop=10):
-        ...     print(url)
-        ...
-        https://www.python.org/about/gettingstarted/
-    """
-
-    # Set to store unique URLs
-    hashes = set()
-
-    # Counter for the number of fetched URLs
-    count = 0
-
-    # Encode the query for URL
-    query = quote_plus(query)
-
-    while not stop or count < stop:
-        last_count = count
-
-        # Construct the Google search URL
-        url = f"https://www.google.com/search?client=opera&q={query}&sourceid=opera&oe=UTF-8"
-
-        # Pause before making the request
-        time.sleep(pause)
-
-        # Fetch the HTML content of the search page
-        html = httpx.get(url).text
-        soup = BeautifulSoup(html, 'html.parser')
-
-        try:
-            # Find all anchor tags containing search result links
-            anchors = soup.find(id='search').findAll('a')
-
-        except AttributeError:
-            # Handle cases where search results are not found in the usual div
-            gbar = soup.find(id='gbar')
-            if gbar:
-                gbar.clear()
-            anchors = soup.findAll('a')
-
-        # Iterate over each anchor tag
-        for a in anchors:
-            try:
-                link = a['href']
-            except KeyError:
-                continue
-
-            # Filter out unwanted links
-            link = filter_result(link)
-            if not link:
-                continue
-
-            # Check for duplicate URLs
-            h = hash(link)
-            if h in hashes:
-                continue
-            hashes.add(h)
-
-            # Yield the valid URL
-            yield link
-
-            # Increment the counter
-            count += 1
-
-            # Check if the desired number of URLs is reached
-            if stop and count >= stop:
-                return
-
-        # Break the loop if no new URLs are found
-        if last_count == count:
-            break
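The removed filter_result helper unwraps Google's /url? redirect wrapper before checking the host. A standalone check of that parsing, with a made-up redirect link for illustration:

```python
# Exercise the redirect-unwrapping logic of the removed filter_result helper.
# The /url? link below is an invented example of Google's redirect format.
from urllib.parse import urlparse, parse_qs

link = "/url?q=https://www.python.org/about/gettingstarted/&sa=U"

if link.startswith('/url?'):
    # Pull the real target out of the q= query parameter
    o = urlparse(link, 'http')
    link = parse_qs(o.query)['q'][0]

o = urlparse(link, 'http')
print(link)                                   # https://www.python.org/about/gettingstarted/
print(o.netloc and 'google' not in o.netloc)  # True -> the link would be yielded
```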
@@ -18,7 +18,7 @@ api_key = "a800ed6c93274fb857ea61bd9e7256c5"


 class TheMovieDB:
-    def __init__(self, api_key, tv_show_manager):
+    def __init__(self, api_key):
         """
         Initialize the class with the API key and TV show manager.

@@ -29,7 +29,6 @@ class TheMovieDB:
         self.api_key = api_key
         self.base_url = "https://api.themoviedb.org/3"
         self.console = Console()
-        self.tv_show_manager = tv_show_manager
         self.genres = self._fetch_genres()

     def _make_request(self, endpoint, params=None):
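The body of _make_request is outside this diff; as a rough standalone stand-in (endpoint and parameter handling here are assumptions, not the project's actual implementation), the trending calls made by the display_* methods below boil down to something like:

```python
# Rough stand-in for TheMovieDB._make_request, whose body is not shown in this diff.
from typing import Optional
import httpx

BASE_URL = "https://api.themoviedb.org/3"
API_KEY = "YOUR_TMDB_API_KEY"  # the module defines its own api_key at the top

def make_request(endpoint: str, params: Optional[dict] = None) -> dict:
    # TMDB v3 accepts the key as an api_key query parameter
    query = {"api_key": API_KEY, **(params or {})}
    response = httpx.get(f"{BASE_URL}/{endpoint}", params=query, timeout=10)
    response.raise_for_status()
    return response.json()

# Same shape as the calls used by the display_* methods
results = make_request("trending/tv/week").get("results", [])
print(f"{len(results)} trending TV shows")
```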
@@ -72,26 +71,27 @@ class TheMovieDB:
         - columns (list): A list of tuples, where each tuple contains the column name and the key to fetch the data from the dictionary.
         """
         # Define column styles with colors
+        tv_show_manager = TVShowManager()
         column_info = {
             col[0]: {'color': col[2] if len(col) > 2 else 'white'}
             for col in columns
         }
-        self.tv_show_manager.add_column(column_info)
+        tv_show_manager.add_column(column_info)

         # Add each item to the TV show manager, including rank
         for index, item in enumerate(data):

             # Convert genre IDs to genre names
             genre_names = [self.genres.get(genre_id, 'Unknown') for genre_id in item.get('genre_ids', [])]
             tv_show = {
                 col[0]: str(item.get(col[1], 'N/A')) if col[1] != 'genre_ids' else ', '.join(genre_names)
-                for col in columns if col[0] != 'Rank'
+                for col in columns
             }
             # Add rank manually
             tv_show['Rank'] = str(index + 1)
-            self.tv_show_manager.add_tv_show(tv_show)
+            tv_show_manager.add_tv_show(tv_show)

         # Display the processed TV show data
-        self.tv_show_manager.display_data(self.tv_show_manager.tv_shows[self.tv_show_manager.slice_start:self.tv_show_manager.slice_end])
+        tv_show_manager.display_data(tv_show_manager.tv_shows[tv_show_manager.slice_start:tv_show_manager.slice_end])

     def _display_with_title(self, title: str, data, columns):
         """
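The row-building comprehension above can be exercised on its own. The sample item and genre map below are invented for illustration and do not come from the API:

```python
# Standalone sketch of the row-building logic in the hunk above.
# The item dict mimics one entry of a TMDB "results" list; values are invented.
genres = {18: 'Drama', 80: 'Crime'}
columns = [
    ("Title", "name", 'cyan'),
    ("First Air Date", "first_air_date", 'green'),
    ("Genres", "genre_ids", 'blue'),
]

item = {"name": "Example Show", "first_air_date": "2024-04-19", "genre_ids": [18, 80]}

# Map genre IDs to names, then build one display row keyed by column name
genre_names = [genres.get(genre_id, 'Unknown') for genre_id in item.get('genre_ids', [])]
tv_show = {
    col[0]: str(item.get(col[1], 'N/A')) if col[1] != 'genre_ids' else ', '.join(genre_names)
    for col in columns
}
print(tv_show)  # {'Title': 'Example Show', 'First Air Date': '2024-04-19', 'Genres': 'Drama, Crime'}
```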
@@ -111,13 +111,12 @@ class TheMovieDB:
         """
         data = self._make_request("trending/tv/week").get("results", [])
         columns = [
-            ("Rank", None, 'yellow'),
             ("Title", "name", 'cyan'),
             ("First Air Date", "first_air_date", 'green'),
             ("Popularity", "popularity", 'magenta'),
             ("Genres", "genre_ids", 'blue'),
             ("Origin Country", "origin_country", 'red'),
-            ("Vote Average", "vote_average", 'white')
+            ("Vote Average", "vote_average", 'yellow')
         ]
         self._display_with_title("Trending TV Shows of the Week", data, columns)

@@ -127,87 +126,13 @@ class TheMovieDB:
         """
         data = self._make_request("trending/movie/week").get("results", [])
         columns = [
-            ("Rank", None, 'yellow'),
             ("Title", "title", 'cyan'),
             ("Release Date", "release_date", 'green'),
             ("Popularity", "popularity", 'magenta'),
             ("Genres", "genre_ids", 'blue'),
-            ("Vote Average", "vote_average", 'white')
+            ("Vote Average", "vote_average", 'yellow')
         ]
         self._display_with_title("Trending Films of the Week", data, columns)

-    def display_recent_films(self):
-        """
-        Fetch and display the films released recently.
-        """
-        data = self._make_request("movie/now_playing").get("results", [])
-        columns = [
-            ("Rank", None, 'yellow'),
-            ("Title", "title", 'cyan'),
-            ("Release Date", "release_date", 'green'),
-            ("Popularity", "popularity", 'magenta'),
-            ("Genres", "genre_ids", 'blue'),
-            ("Vote Average", "vote_average", 'white')
-        ]
-        self._display_with_title("Recently Released Films", data, columns)
-
-    def display_recent_tv_shows(self):
-        """
-        Fetch and display the TV shows airing recently.
-        """
-        data = self._make_request("tv/on_the_air").get("results", [])
-        columns = [
-            ("Rank", None, 'yellow'),
-            ("Title", "name", 'cyan'),
-            ("First Air Date", "first_air_date", 'green'),
-            ("Popularity", "popularity", 'magenta'),
-            ("Genres", "genre_ids", 'blue'),
-            ("Origin Country", "origin_country", 'red'),
-            ("Vote Average", "vote_average", 'white')
-        ]
-        self._display_with_title("Recently Aired TV Shows", data, columns)
-
-    def search_by_genre(self, genre_id):
-        """
-        Fetch and display TV shows based on genre.
-
-        Parameters:
-            - genre_id (int): The genre ID to filter the results.
-        """
-        endpoint = "discover/tv"
-        params = {"with_genres": genre_id, "sort_by": "popularity.desc"}
-        data = self._make_request(endpoint, params).get("results", [])
-        columns = [
-            ("Rank", None, 'yellow'),
-            ("Title", "name", 'cyan'),
-            ("First Air Date", "first_air_date", 'green'),
-            ("Popularity", "popularity", 'magenta'),
-            ("Genres", "genre_ids", 'blue'),
-            ("Origin Country", "origin_country", 'red'),
-            ("Vote Average", "vote_average", 'white')
-        ]
-        self._display_with_title(f"TV Shows by Genre {genre_id}", data, columns)
-
-    def search_by_title(self, title):
-        """
-        Search and display TV shows by title.
-
-        Parameters:
-            - title (str): The title to search for.
-        """
-        endpoint = "search/tv"
-        params = {"query": title}
-        data = self._make_request(endpoint, params).get("results", [])
-        columns = [
-            ("Rank", None, 'yellow'),
-            ("Title", "name", 'cyan'),
-            ("First Air Date", "first_air_date", 'green'),
-            ("Popularity", "popularity", 'magenta'),
-            ("Genres", "genre_ids", 'blue'),
-            ("Origin Country", "origin_country", 'red'),
-            ("Vote Average", "vote_average", 'white')
-        ]
-        self._display_with_title(f"Search Results for: {title}", data, columns)
-
 # Output
-tmdb = TheMovieDB(api_key, tv_show_manager)
+tmdb = TheMovieDB(api_key)
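The third element of each column tuple is a Rich color name. TVShowManager's actual rendering is not part of this diff, but a standalone Rich sketch of how such a column spec can drive a table (the two rows are invented) looks like this:

```python
# Illustrative only: render a table from (name, key, color) column tuples with Rich.
from rich.console import Console
from rich.table import Table

columns = [
    ("Title", "title", 'cyan'),
    ("Release Date", "release_date", 'green'),
    ("Vote Average", "vote_average", 'yellow'),
]
rows = [
    {"Title": "Example Film", "Release Date": "2024-04-19", "Vote Average": "7.8"},
    {"Title": "Another Film", "Release Date": "2024-04-20", "Vote Average": "6.9"},
]

table = Table()
for name, _, color in columns:
    # Use the third tuple element as the column style
    table.add_column(name, style=color)
for row in rows:
    table.add_row(*(row[name] for name, _, _ in columns))

Console().print(table)
```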
@@ -102,7 +102,7 @@ class TVShowManager:

         # Handling user input for loading more items or quitting
         if self.slice_end < total_items:
-            self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]to restart, or [red]'q' [green]to quit.")
+            self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")

            if not force_int_input:
                key = Prompt.ask("\n[cyan]Insert media [red]index [yellow]or [red](*) [cyan]to download all media [yellow]or [red][1-2] [cyan]or [red][3-*] [cyan]for a range of media")
@@ -118,8 +118,8 @@ class TVShowManager:
                    break

                elif key == "":
-                    self.slice_start += self.slice_end
-                    self.slice_end += self.slice_end
+                    self.slice_start += self.step
+                    self.slice_end += self.step
                    if self.slice_end > total_items:
                        self.slice_end = total_items

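The slicing change above advances the window by a fixed step instead of by slice_end, so each press of Enter shows the next page of constant size rather than a doubling window. A minimal sketch of the arithmetic, with an invented 7-item list and a step of 3:

```python
# Paginate a list in fixed-size windows, mirroring the slice_start/slice_end
# bookkeeping above. The 7-item list and step of 3 are made up for illustration.
items = list(range(7))
step = 3
slice_start, slice_end = 0, step

while True:
    page = items[slice_start:slice_end]
    print(page)
    if slice_end >= len(items):
        break
    # Advance by the step, not by slice_end, so the window size stays constant
    slice_start += step
    slice_end += step
    if slice_end > len(items):
        slice_end = len(items)
```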
@@ -127,7 +127,8 @@ class TVShowManager:
                    break

        else:
-            self.console.print(f"\n\n[yellow][INFO] [red]You've reached the end. [green]Press [red]Enter [green]to restart, or [red]'q' [green]to quit.")
+            # Last slice, ensure all remaining items are shown
+            self.console.print(f"\n\n[yellow][INFO] [red]You've reached the end. [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")
            if not force_int_input:
                key = Prompt.ask("\n[cyan]Insert media [red]index [yellow]or [red](*) [cyan]to download all media [yellow]or [red][1-2] [cyan]or [red][3-*] [cyan]for a range of media")

@@ -13,7 +13,8 @@
            "user": "admin",
            "pass": "adminadmin"
        },
-        "not_close": false
+        "not_close": false,
+        "show_trending": true
    },
    "REQUESTS": {
        "timeout": 10,
run.py
@@ -23,6 +23,7 @@ from Src.Util.logger import Logger

 # Config
 CLOSE_CONSOLE = config_manager.get_bool('DEFAULT', 'not_close')
+SHOW_TRENDING = config_manager.get_bool('DEFAULT', 'show_trending')


 def run_function(func: Callable[..., None], close_console: bool = False) -> None:
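config_manager is the project's own helper and its implementation is not shown in this diff. A rough standalone equivalent of the two get_bool reads above, assuming the settings live in a JSON file shaped like the block edited earlier (the "config.json" path and the top-level "DEFAULT" section are assumptions):

```python
# Hypothetical stand-in for config_manager.get_bool, reading a JSON settings file.
import json

def get_bool(section: str, key: str, path: str = "config.json") -> bool:
    with open(path, "r", encoding="utf-8") as f:
        settings = json.load(f)
    return bool(settings[section][key])

CLOSE_CONSOLE = get_bool('DEFAULT', 'not_close')
SHOW_TRENDING = get_bool('DEFAULT', 'show_trending')
```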
@@ -129,8 +130,11 @@ def initialize():
        console.log("[red]Error with loading github.")

    # Show trending film and series
+    if SHOW_TRENDING:
        tmdb.display_trending_films()
+        print()
        tmdb.display_trending_tv_shows()
+        print()


 def main():