Mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-05 02:55:25 +00:00)

Commit c093687e9f: "Migrate to fastapi."
Parent commit: 172b09ea46
@@ -1,17 +1,22 @@
 # 23.11.24

 import re
 import logging
 from typing import Dict, Any, List, Union


 class Episode:
     def __init__(self, data: Dict[str, Any]):
-        self.id: int = data.get('id', '')
-        self.number: int = data.get('number', '')
-        self.name: str = data.get('name', '')
-        self.plot: str = data.get('plot', '')
-        self.duration: int = data.get('duration', '')
+        self.images = None
+        self.data = data
+
+        self.id: int = data.get('id')
+        self.scws_id: int = data.get('scws_id')
+        self.number: int = data.get('number')
+        self.name: str = data.get('name')
+        self.plot: str = data.get('plot')
+        self.duration: int = data.get('duration')
+
+    def collect_image(self, SITE_NAME, domain):
+        self.image = f"https://cdn.{SITE_NAME}.{domain}/images/{self.data.get('images')[0]['filename']}"

     def __str__(self):
         return f"Episode(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', duration={self.duration} sec)"
@@ -20,7 +25,7 @@ class EpisodeManager:
     def __init__(self):
         self.episodes: List[Episode] = []

-    def add_episode(self, episode_data: Dict[str, Any]):
+    def add(self, episode_data: Dict[str, Any]):
         """
         Add a new episode to the manager.

@@ -29,8 +34,20 @@ class EpisodeManager:
         """
         episode = Episode(episode_data)
         self.episodes.append(episode)

+    def get(self, index: int) -> Episode:
+        """
+        Retrieve an episode by its index in the episodes list.
+
+        Parameters:
+            - index (int): The zero-based index of the episode to retrieve.
+
+        Returns:
+            Episode: The Episode object at the specified index.
+        """
+        return self.episodes[index]
+
-    def get_length(self) -> int:
+    def length(self) -> int:
         """
         Get the number of episodes in the manager.

@@ -54,61 +71,23 @@ class EpisodeManager:

 class Season:
     def __init__(self, season_data: Dict[str, Union[int, str, None]]):
+        self.images = {}
+        self.season_data = season_data
+
         self.id: int = season_data.get('id')
+        self.scws_id: int = season_data.get('scws_id')
+        self.imdb_id: int = season_data.get('imdb_id')
         self.number: int = season_data.get('number')
         self.name: str = season_data.get('name')
         self.slug: str = season_data.get('slug')
         self.plot: str = season_data.get('plot')
         self.episodes_count: int = season_data.get('episodes_count')
+        self.type: str = season_data.get('type')
+        self.seasons_count: int = season_data.get('seasons_count')
+        self.episodes: EpisodeManager = EpisodeManager()

     def __str__(self):
         return f"Season(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', episodes_count={self.episodes_count})"

-class SeasonManager:
-    def __init__(self):
-        self.seasons: List[Season] = []
-
-    def add_season(self, season_data: Dict[str, Union[int, str, None]]):
-        """
-        Add a new season to the manager.
-
-        Parameters:
-            season_data (Dict[str, Union[int, str, None]]): A dictionary containing data for the new season.
-        """
-        season = Season(season_data)
-        self.seasons.append(season)
-
-    def get(self, index: int) -> Season:
-        """
-        Get a season item from the list by index.
-
-        Parameters:
-            index (int): The index of the seasons item to retrieve.
-
-        Returns:
-            Season: The media item at the specified index.
-        """
-        return self.media_list[index]
-
-    def get_length(self) -> int:
-        """
-        Get the number of seasons in the manager.
-
-        Returns:
-            int: Number of seasons.
-        """
-        return len(self.seasons)
-
-    def clear(self) -> None:
-        """
-        This method clears the seasons list.
-
-        Parameters:
-            self: The object instance.
-        """
-        self.seasons.clear()
-
-    def __str__(self):
-        return f"SeasonManager(num_seasons={len(self.seasons)})"
+    def collect_images(self, SITE_NAME, domain):
+        for dict_image in self.season_data.get('images'):
+            self.images[dict_image.get('type')] = f"https://cdn.{SITE_NAME}.{domain}/images/{dict_image.get('filename')}"


 class Stream:
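For orientation, a minimal sketch of how the reworked classes fit together after this hunk. The dictionaries and the site/domain values are hypothetical; only the field names and method names come from the code above.

    season_data = {
        'id': 1, 'scws_id': 10, 'imdb_id': None, 'number': 1,
        'name': 'Season 1', 'slug': 'season-1', 'plot': '...',
        'episodes_count': 1, 'type': 'tv', 'seasons_count': 1,
        'images': [{'type': 'poster', 'filename': 'poster.webp'}],
    }

    season = Season(season_data)
    season.collect_images("example", "xyz")    # placeholder site name and TLD
    season.episodes.add({'id': 5, 'number': 1, 'name': 'Pilot', 'plot': '...', 'duration': 50})

    print(season.episodes.length())    # 1
    print(season.episodes.get(0).name)    # 'Pilot'
    print(season.images['poster'])    # https://cdn.example.xyz/images/poster.webp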
@@ -120,8 +120,7 @@ class VideoSource:
             response.raise_for_status()

         except Exception as e:
-            print("\n")
-            console.print(Panel("[red bold]Coming soon", title="Notification", title_align="left", border_style="yellow"))
             logging.error(f"Failed to get vixcloud content with error: {e}")
             sys.exit(0)

         # Parse response with BeautifulSoup to get content
@@ -169,6 +168,56 @@ class VideoSource:
         # Construct the new URL with updated query parameters
         return urlunparse(parsed_url._replace(query=query_string))

+    def get_mp4(self, url_to_download: str, scws_id: str) -> list:
+        """
+        Generate download links for the specified resolutions from StreamingCommunity.
+
+        Args:
+            url_to_download (str): URL of the video page.
+            scws_id (str): SCWS ID of the title.
+
+        Returns:
+            list: A list of video download URLs.
+        """
+        headers = {
+            'referer': url_to_download,
+            'user-agent': get_headers(),
+        }
+
+        # API request to get video details
+        video_api_url = f'https://{self.base_name}.{self.domain}/api/video/{scws_id}'
+        response = httpx.get(video_api_url, headers=headers)
+
+        if response.status_code == 200:
+            response_json = response.json()
+
+            video_tracks = response_json.get('video_tracks', [])
+            track = video_tracks[-1]
+            console.print(f"[cyan]Available resolutions: [red]{[str(track['quality']) for track in video_tracks]}")
+
+            # Request download link generation for each track
+            download_response = httpx.post(
+                url=f'https://{self.base_name}.{self.domain}/api/download/generate_link?scws_id={track["video_id"]}&rendition={track["quality"]}',
+                headers={
+                    'referer': url_to_download,
+                    'user-agent': get_headers(),
+                    'x-xsrf-token': config_manager.get("SITE", self.base_name)['extra']['x-xsrf-token']
+                },
+                cookies={
+                    'streamingcommunity_session': config_manager.get("SITE", self.base_name)['extra']['streamingcommunity_session']
+                }
+            )
+
+            if download_response.status_code == 200:
+                return {'url': download_response.text, 'quality': track["quality"]}
+
+            else:
+                logging.error(f"Failed to generate link for resolution {track['quality']} (HTTP {download_response.status_code}).")
+
+        else:
+            logging.error(f"Error fetching video API URL (HTTP {response.status_code}).")
+            return []

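A sketch of calling the new method; the page URL and scws_id are placeholders, and the session and XSRF values are read from config.json exactly as the code above shows. Note that despite the list annotation, a successful call returns a dict with url and quality.

    source = VideoSource("streamingcommunity", True)
    result = source.get_mp4(
        url_to_download="https://streamingcommunity.example/watch/12345",  # placeholder
        scws_id="12345",                                                   # placeholder
    )

    if result:
        print(result['quality'], result['url'])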
 class VideoSourceAnime(VideoSource):
     def __init__(self, site_name: str):

@@ -221,4 +270,4 @@ class VideoSourceAnime(VideoSource):

         except Exception as e:
             logging.error(f"Error fetching embed URL: {e}")
-            return None
+        return None
@@ -19,6 +19,7 @@ _useFor = "film_serie"
 _deprecate = False
 _priority = 1
 _engineDownload = "hls"
+from .costant import SITE_NAME


 def search(string_to_search: str = None, get_onylDatabase: bool = False):
@@ -27,7 +28,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """

     if string_to_search is None:
-        string_to_search = msg.ask("\n[purple]Insert word to search in all site").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()

     # Get site domain and version and get result of the search
     site_version, domain = get_version_and_domain()
@@ -52,13 +52,9 @@ def download_film(select_title: MediaItem):
     mp4_path = os.path.join(ROOT_PATH, SITE_NAME, MOVIE_FOLDER, select_title.slug)

     # Download the film using the m3u8 playlist, and output filename
-    r_proc = HLS_Downloader(
+    HLS_Downloader(
         m3u8_playlist=master_playlist,
         output_filename=os.path.join(mp4_path, title_name)
     ).start()

-    if r_proc != None:
-        console.print("[green]Result: ")
-        console.print(r_proc)
-
     return os.path.join(mp4_path, title_name)
@@ -40,15 +40,16 @@ def download_video(tv_name: str, index_season_selected: int, index_episode_selec
     """

     start_message()
+    tv_name = scrape_serie.season_manager.slug

     # Get info about episode
-    obj_episode = scrape_serie.obj_episode_manager.episodes[index_episode_selected - 1]
+    obj_episode = scrape_serie.episode_manager.get(index_episode_selected - 1)
     console.print(f"[yellow]Download: [red]{index_season_selected}:{index_episode_selected} {obj_episode.name}")
     print()

     # Define filename and path for the downloaded video
     mp4_name = f"{map_episode_title(tv_name, index_season_selected, index_episode_selected, obj_episode.name)}.mp4"
     mp4_path = os.path.join(ROOT_PATH, SITE_NAME, SERIES_FOLDER, tv_name, f"S{index_season_selected}")

     # Retrieve scws and if available master playlist
     video_source.get_iframe(obj_episode.id)
@@ -56,14 +57,10 @@ def download_video(tv_name: str, index_season_selected: int, index_episode_selec
     master_playlist = video_source.get_playlist()

     # Download the episode
-    r_proc = HLS_Downloader(
+    HLS_Downloader(
         m3u8_playlist=master_playlist,
         output_filename=os.path.join(mp4_path, mp4_name)
     ).start()

-    if r_proc != None:
-        console.print("[green]Result: ")
-        console.print(r_proc)
-
     return os.path.join(mp4_path, mp4_name)

@@ -78,13 +75,12 @@ def download_episode(tv_name: str, index_season_selected: int, scrape_serie: Scr
     """

     # Clean memory of all episodes and get the number of the season
-    scrape_serie.obj_episode_manager.clear()
-    season_number = scrape_serie.obj_season_manager.seasons[index_season_selected - 1].number
+    scrape_serie.episode_manager.clear()

     # Start message and collect information about episodes
     start_message()
-    scrape_serie.collect_title_season(season_number)
-    episodes_count = scrape_serie.obj_episode_manager.get_length()
+    scrape_serie.collect_info_season(index_season_selected)
+    episodes_count = scrape_serie.episode_manager.length()

     if download_all:

@@ -131,8 +127,8 @@ def download_series(select_season: MediaItem, version: str) -> None:
     video_source.setup(select_season.id)

     # Collect information about seasons
-    scrape_serie.collect_info_seasons()
-    seasons_count = scrape_serie.obj_season_manager.get_length()
+    scrape_serie.collect_info_title()
+    seasons_count = scrape_serie.season_manager.seasons_count

     # Prompt user for season selection and download episodes
     console.print(f"\n[green]Seasons found: [red]{seasons_count}")
@@ -182,7 +178,7 @@ def display_episodes_list(scrape_serie) -> str:
     table_show_manager.add_column(column_info)

     # Populate the table with episodes information
-    for i, media in enumerate(scrape_serie.obj_episode_manager.episodes):
+    for i, media in enumerate(scrape_serie.episode_manager.episodes):
         table_show_manager.add_tv_show({
             'Index': str(media.number),
             'Name': media.name,
@@ -3,6 +3,7 @@
 import sys
 import json
 import logging
+import secrets


 # External libraries
@@ -31,7 +32,7 @@ from .costant import SITE_NAME
 # Variable
 media_search_manager = MediaManager()
 table_show_manager = TVShowManager()
+max_timeout = config_manager.get_int("REQUESTS", "timeout")


 def get_version(text: str):
@@ -52,7 +53,7 @@ def get_version(text: str):

     # Extract version
     version = json.loads(soup.find("div", {"id": "app"}).get("data-page"))['version']
-    #console.print(f"[cyan]Get version [white]=> [red]{version} \n")
+    console.print(f"[cyan]Get version [white]=> [red]{version} \n")

     return version

@@ -74,7 +75,17 @@ def get_version_and_domain():
     domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")

     # Extract version from the response
-    version = get_version(httpx.get(base_url, headers={'user-agent': get_headers()}).text)
+    try:
+        version = get_version(httpx.get(
+            url=base_url,
+            headers={
+                'user-agent': get_headers()
+            },
+            timeout=max_timeout
+        ).text)
+    except:
+        console.print("[green]Auto generate version ...")
+        version = secrets.token_hex(32 // 2)

     return version, domain_to_use

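The fallback swaps the scraped Inertia version for a random 16-byte hex token; a quick illustration of its shape:

    import secrets

    fake_version = secrets.token_hex(32 // 2)  # 16 bytes -> 32 hex characters
    print(len(fake_version))                   # 32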
@@ -90,10 +101,6 @@ def title_search(title_search: str, domain: str) -> int:
     Returns:
         int: The number of titles found.
     """
-    max_timeout = config_manager.get_int("REQUESTS", "timeout")

     # Send request to search for titles ( replace à to a and space to "+" )
     try:
         response = httpx.get(
             url=f"https://{SITE_NAME}.{domain}/api/search?q={title_search.replace(' ', '+')}",
@@ -112,6 +119,7 @@ def title_search(title_search: str, domain: str) -> int:
             'slug': dict_title.get('slug'),
             'name': dict_title.get('name'),
             'type': dict_title.get('type'),
+            'date': dict_title.get('last_air_date'),
             'score': dict_title.get('score')
         })

@@ -10,7 +10,7 @@ import httpx
 # Internal utilities
 from StreamingCommunity.Util.headers import get_headers
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Api.Player.Helper.Vixcloud.util import SeasonManager, EpisodeManager
+from StreamingCommunity.Api.Player.Helper.Vixcloud.util import Season, EpisodeManager


 # Variable
@@ -26,7 +26,7 @@ class ScrapeSerie:
         site_name (str): Name of the streaming site to scrape from
     """
     self.is_series = False
-    self.headers = {}
+    self.headers = {'user-agent': get_headers()}
     self.base_name = site_name
     self.domain = config_manager.get_dict('SITE', self.base_name)['domain']

@@ -46,23 +46,22 @@ class ScrapeSerie:
         if series_name is not None:
             self.is_series = True
             self.series_name = series_name
-            self.obj_season_manager: SeasonManager = SeasonManager()
-            self.obj_episode_manager: EpisodeManager = EpisodeManager()
-
-        # Create headers
-        self.headers = {
-            'user-agent': get_headers(),
-            'x-inertia': 'true',
-            'x-inertia-version': self.version,
-        }
+            self.season_manager = None
+            self.episode_manager: EpisodeManager = EpisodeManager()

-    def collect_info_seasons(self) -> None:
+    def collect_info_title(self) -> None:
         """
         Retrieve season information for a TV series from the streaming site.

         Raises:
             Exception: If there's an error fetching season information
         """
+        self.headers = {
+            'user-agent': get_headers(),
+            'x-inertia': 'true',
+            'x-inertia-version': self.version,
+        }

         try:
             response = httpx.get(
@@ -73,17 +72,22 @@ class ScrapeSerie:
             response.raise_for_status()

-            # Extract seasons from JSON response
-            json_response = response.json().get('props', {}).get('title', {}).get('seasons', [])
-
-            # Add each season to the season manager
-            for dict_season in json_response:
-                self.obj_season_manager.add_season(dict_season)
+            json_response = response.json().get('props')
+
+            # Collect info about season
+            self.season_manager = Season(json_response.get('title'))
+            self.season_manager.collect_images(self.base_name, self.domain)
+
+            # Collect first episode info
+            for i, ep in enumerate(json_response.get('loadedSeason').get('episodes')):
+                self.season_manager.episodes.add(ep)
+                self.season_manager.episodes.get(i).collect_image(self.base_name, self.domain)

         except Exception as e:
             logging.error(f"Error collecting season info: {e}")
             raise

-    def collect_title_season(self, number_season: int) -> None:
+    def collect_info_season(self, number_season: int) -> None:
         """
         Retrieve episode information for a specific season.

@@ -93,6 +97,12 @@ class ScrapeSerie:
         Raises:
             Exception: If there's an error fetching episode information
         """
+        self.headers = {
+            'user-agent': get_headers(),
+            'x-inertia': 'true',
+            'x-inertia-version': self.version,
+        }

         try:
             response = httpx.get(
                 url=f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
@@ -102,11 +112,11 @@ class ScrapeSerie:
             response.raise_for_status()

             # Extract episodes from JSON response
-            json_response = response.json().get('props', {}).get('loadedSeason', {}).get('episodes', [])
+            json_response = response.json().get('props').get('loadedSeason').get('episodes')

             # Add each episode to the episode manager
             for dict_episode in json_response:
-                self.obj_episode_manager.add_episode(dict_episode)
+                self.episode_manager.add(dict_episode)

         except Exception as e:
             logging.error(f"Error collecting title season info: {e}")
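Putting the renamed methods together, a minimal sketch of the new scraping flow; the media id and slug are placeholders, and setup's keyword arguments mirror how server.py calls it later in this commit.

    scrape_serie = ScrapeSerie("streamingcommunity")
    scrape_serie.setup(version=version, media_id=1234, series_name="example-show")  # placeholders

    # One request now fills the title info and the first loaded season's episodes
    scrape_serie.collect_info_title()
    print(scrape_serie.season_manager.seasons_count)

    # Episodes for a specific season land in the renamed manager
    scrape_serie.collect_info_season(1)
    for ep in scrape_serie.episode_manager.episodes:
        print(ep.number, ep.name)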
@@ -49,7 +49,16 @@ def get_final_redirect_url(initial_url, max_timeout):

     # Create a client with redirects enabled
     try:
-        with httpx.Client(follow_redirects=True, timeout=max_timeout, headers={'user-agent': get_headers()}) as client:
+        with httpx.Client(
+            headers={
+                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+                'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
+                'User-Agent': get_headers()
+            },
+            follow_redirects=True,
+            timeout=max_timeout
+        ) as client:
             response = client.get(initial_url)
             response.raise_for_status()

@@ -59,7 +68,7 @@ def get_final_redirect_url(initial_url, max_timeout):
         return final_url

     except Exception as e:
-        console.print(f"[cyan]Test url[white]: [red]{initial_url}, [cyan]error[white]: [red]{e}")
+        console.print(f"\n[cyan]Test url[white]: [red]{initial_url}, [cyan]error[white]: [red]{e}")
         return None

 def search_domain(site_name: str, base_url: str):
@@ -69,7 +78,6 @@ def search_domain(site_name: str, base_url: str):

     Parameters:
         - site_name (str): The name of the site to search the domain for.
         - base_url (str): The base URL to construct complete URLs.
-        - follow_redirects (bool): To follow redirect url or not.

     Returns:
         tuple: The found domain and the complete URL.
@@ -80,47 +88,67 @@ def search_domain(site_name: str, base_url: str):
     domain = str(config_manager.get_dict("SITE", site_name)['domain'])

     try:
         # Test the current domain
-        response_follow = httpx.get(f"{base_url}.{domain}", headers={'user-agent': get_headers()}, timeout=max_timeout, follow_redirects=True)
-        response_follow.raise_for_status()
+        with httpx.Client(
+            headers={
+                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+                'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
+                'User-Agent': get_headers()
+            },
+            follow_redirects=True,
+            timeout=max_timeout
+        ) as client:
+            response_follow = client.get(f"{base_url}.{domain}")
+            response_follow.raise_for_status()

     except Exception as e:
         query = base_url.split("/")[-1]
-        first_url = google_search(query)
-        console.print(f"[green]First url from google search[white]: [red]{first_url}")
-
-        if first_url:
-            final_url = get_final_redirect_url(first_url, max_timeout)
-
-            if final_url != None:
-                console.print(f"\n[bold yellow]Suggestion:[/bold yellow] [white](Experimental)\n"
-                              f"[cyan]New final URL[white]: [green]{final_url}")
-
-                def extract_domain(url):
-                    parsed_url = urlparse(url)
-                    domain = parsed_url.netloc
-                    return domain.split(".")[-1]
-
-                new_domain_extract = extract_domain(str(final_url))
-
-                if msg.ask(f"[red]Do you want to auto update config.json - '[green]{site_name}[red]' with domain: [green]{new_domain_extract}", choices=["y", "n"], default="y").lower() == "y":
-
-                    # Update domain in config.json
-                    config_manager.config['SITE'][site_name]['domain'] = new_domain_extract
-                    config_manager.write_config()
-
-                    # Return config domain
-                    #console.print(f"[cyan]Return domain: [red]{new_domain_extract} \n")
-                    return new_domain_extract, f"{base_url}.{new_domain_extract}"
-
-            else:
-                console.print("[bold red]\nManually change the domain in the JSON file.[/bold red]")
-                raise
-
-        else:
-            console.print("[bold red]No valid URL to follow redirects.[/bold red]")
+
+        # Perform a Google search with multiple results
+        search_results = list(search(query, num_results=5))
+        #console.print(f"[green]Google search results[white]: {search_results}")
+
+        # Iterate through search results
+        for first_url in search_results:
+            console.print(f"[green]Checking url[white]: [red]{first_url}")
+
+            # Check if the base URL matches the Google search result
+            parsed_first_url = urlparse(first_url)
+
+            # Compare base url from google search with base url from config.json
+            if parsed_first_url.netloc.split(".")[0] == base_url:
+                console.print(f"[red]URL does not match base URL. Skipping.[/red]")
+                continue
+
+            try:
+                final_url = get_final_redirect_url(first_url, max_timeout)
+
+                if final_url is not None:
+
+                    def extract_domain(url):
+                        parsed_url = urlparse(url)
+                        domain = parsed_url.netloc
+                        return domain.split(".")[-1]
+
+                    new_domain_extract = extract_domain(str(final_url))
+
+                    if msg.ask(f"[cyan]\nDo you want to auto update site[white]: [red]{site_name}[cyan] with domain[white]: [red]{new_domain_extract}", choices=["y", "n"], default="y").lower() == "y":
+
+                        # Update domain in config.json
+                        config_manager.config['SITE'][site_name]['domain'] = new_domain_extract
+                        config_manager.write_config()
+
+                        # Return config domain
+                        return new_domain_extract, f"{base_url}.{new_domain_extract}"
+
+            except Exception as redirect_error:
+                console.print(f"[red]Error following redirect for {first_url}: {redirect_error}")
+                continue
+
+        # If no matching URL is found
+        console.print("[bold red]No valid URL found matching the base URL.[/bold red]")
+        raise Exception("No matching domain found")

     # Ensure the URL is in string format before parsing
     parsed_url = urlparse(str(response_follow.url))
@@ -128,10 +156,9 @@ def search_domain(site_name: str, base_url: str):
     tld = parse_domain.split('.')[-1]

     if tld is not None:
-
         # Update domain in config.json
         config_manager.config['SITE'][site_name]['domain'] = tld
         config_manager.write_config()

     # Return config domain
     return tld, f"{base_url}.{tld}"
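extract_domain keeps only the TLD of the final redirect target, and that is what gets written back into config.json; a standalone illustration with a throwaway URL:

    from urllib.parse import urlparse

    def extract_domain(url):
        return urlparse(url).netloc.split(".")[-1]

    print(extract_domain("https://streamingcommunity.example.xyz/titles"))  # 'xyz'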
@@ -117,16 +117,29 @@ def validate_selection(list_season_select: List[int], seasons_count: int) -> Lis
     Returns:
         - List[int]: Adjusted list of valid season numbers.
     """
-    # Remove any seasons greater than the available seasons
-    valid_seasons = [season for season in list_season_select if 1 <= season <= seasons_count]
-
-    # If the list is empty, the input was completely invalid
-    if not valid_seasons:
-        logging.error(f"Invalid selection: The selected seasons are outside the available range (1-{seasons_count}). Please try again.")
-
-        # Prompt the user for valid input again
-        input_seasons = input(f"Enter valid season numbers (1-{seasons_count}): ")
-        list_season_select = list(map(int, input_seasons.split(',')))
-
-    return valid_seasons
+    while True:
+        try:
+            # Remove any seasons greater than the available seasons
+            valid_seasons = [season for season in list_season_select if 1 <= season <= seasons_count]
+
+            # If the list is empty, the input was completely invalid
+            if not valid_seasons:
+                print()
+                raise ValueError(f"Invalid selection: The selected seasons are outside the available range (1-{seasons_count}).")
+
+            return valid_seasons  # Return the valid seasons if the input is correct
+
+        except ValueError:
+            logging.error("Error: Please enter valid integers separated by commas.")
+
+            # Re-prompt for valid input
+            input_seasons = input(f"Enter valid season numbers (1-{seasons_count}): ")
+            list_season_select = list(map(int, input_seasons.split(',')))
+            continue  # Re-prompt the user if the selection is invalid


 # --> for episode
@@ -141,13 +154,26 @@ def validate_episode_selection(list_episode_select: List[int], episodes_count: i
     Returns:
         - List[int]: Adjusted list of valid episode numbers.
     """
-    # Remove any episodes greater than the available episodes
-    valid_episodes = [episode for episode in list_episode_select if 1 <= episode <= episodes_count]
-
-    # If the list is empty, the input was completely invalid
-    if not valid_episodes:
-        logging.error(f"Invalid selection: The selected episodes are outside the available range (1-{episodes_count}). Please try again.")
-
-        # Prompt the user for valid input again
-        input_episodes = input(f"Enter valid episode numbers (1-{episodes_count}): ")
-        list_episode_select = list(map(int, input_episodes.split(',')))
-
-    return valid_episodes
+    while True:
+        try:
+            # Remove any episodes greater than the available episodes
+            valid_episodes = [episode for episode in list_episode_select if 1 <= episode <= episodes_count]
+
+            # If the list is empty, the input was completely invalid
+            if not valid_episodes:
+                print()
+                raise ValueError(f"Invalid selection: The selected episodes are outside the available range (1-{episodes_count}).")
+
+            return valid_episodes
+
+        except ValueError:
+            logging.error("Error: Please enter valid integers separated by commas.")
+
+            # Re-prompt for valid input
+            input_episodes = input(f"Enter valid episode numbers (1-{episodes_count}): ")
+            list_episode_select = list(map(int, input_episodes.split(',')))
+            continue  # Re-prompt the user if the selection is invalid
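The rewritten validators keep the same contract on valid input; a tiny example of the filtering both perform:

    # 99 is out of range and is dropped; the loop only re-prompts when
    # nothing at all survives the filter.
    print(validate_selection([1, 3, 99], seasons_count=5))  # [1, 3]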
@@ -229,11 +229,6 @@ class M3U8_Segments:
             self.download_interrupted = True
             self.stop_event.set()

-        if threading.current_thread() is threading.main_thread():
-            signal.signal(signal.SIGINT, interrupt_handler)
-        else:
-            print("Signal handler must be set in the main thread")

     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
         """
         Downloads a TS segment and adds it to the segment queue with retry logic.
@@ -548,7 +543,7 @@ class M3U8_Segments:
         file_size = os.path.getsize(self.tmp_file_path)
         if file_size == 0:
             raise Exception("Output file is empty")

         # Get expected time
         ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
         ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
StreamingCommunity/Lib/Driver/driver_1.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+# 29.06.24
+
+import tempfile
+import logging
+
+
+# External library
+from bs4 import BeautifulSoup
+from seleniumbase import Driver
+
+
+# Internal utilities
+from StreamingCommunity.Util._jsonConfig import config_manager
+
+
+# Config
+USE_HEADLESS = config_manager.get_bool("BROWSER", "headless")
+
+
+class WebAutomation:
+    """
+    A class for automating web interactions using SeleniumBase and BeautifulSoup.
+    """
+    def __init__(self):
+        """
+        Initializes the WebAutomation instance with the SeleniumBase Driver;
+        headless mode is controlled by the BROWSER config entry.
+        """
+        logging.getLogger('seleniumbase').setLevel(logging.ERROR)
+
+        self.driver = Driver(
+            uc=True,
+            uc_cdp_events=True,
+            headless=USE_HEADLESS,
+            user_data_dir=tempfile.mkdtemp(),
+            chromium_arg="--disable-search-engine-choice-screen"
+        )
+
+    def quit(self):
+        """
+        Quits the WebDriver instance.
+        """
+        self.driver.quit()
+
+    def get_page(self, url):
+        """
+        Navigates the browser to the specified URL.
+
+        Parameters:
+            url (str): The URL to navigate to.
+        """
+        self.driver.get(url)
+
+    def retrieve_soup(self):
+        """
+        Retrieves the BeautifulSoup object for the current page's HTML content.
+
+        Returns:
+            BeautifulSoup object: Parsed HTML content of the current page.
+        """
+        html_content = self.driver.page_source
+        soup = BeautifulSoup(html_content, 'html.parser')
+        return soup
+
+    def get_content(self):
+        """
+        Returns the HTML content of the current page.
+
+        Returns:
+            str: The HTML content of the current page.
+        """
+        return self.driver.page_source
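A minimal usage sketch of the new helper; the URL is a placeholder.

    web = WebAutomation()
    try:
        web.get_page("https://example.com")    # placeholder URL
        soup = web.retrieve_soup()
        print(soup.title.string if soup.title else "no <title>")
    finally:
        web.quit()    # always release the browser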
@@ -55,7 +55,6 @@ def get_video_duration(file_path: str) -> float:
     Returns:
         (float): The duration of the video in seconds if successful, None if there's an error.
     """
-
     try:
         ffprobe_cmd = [FFPROB_PATH, '-v', 'error', '-show_format', '-print_format', 'json', file_path]
         logging.info(f"FFmpeg command: {ffprobe_cmd}")
@@ -1,5 +1,12 @@
 # 03.03.24

+import os
+import sys
+import logging
+import importlib
+
+
 # External library
 from rich.console import Console
 from rich.table import Table
 from rich.prompt import Prompt
@@ -9,15 +16,13 @@ from typing import Dict, List, Any

 # Internal utilities
 from .message import start_message
+from .call_stack import get_call_stack


 class TVShowManager:
     def __init__(self):
         """
         Initialize TVShowManager with provided column information.

         Parameters:
             - column_info (Dict[str, Dict[str, str]]): Dictionary containing column names, their colors, and justification.
         """
         self.console = Console()
         self.tv_shows: List[Dict[str, Any]] = []  # List to store TV show data as dictionaries
@@ -80,7 +85,6 @@ class TVShowManager:

         self.console.print(table)  # Use self.console.print instead of print

-
     def run(self, force_int_input: bool = False, max_int_input: int = 0) -> str:
         """
         Run the TV show manager application.
@@ -101,9 +105,16 @@ class TVShowManager:
         # Display table
         self.display_data(self.tv_shows[self.slice_start:self.slice_end])

+        # Find research function from call stack
+        research_func = None
+        for reverse_fun in get_call_stack():
+            if reverse_fun['function'] == 'search' and reverse_fun['script'] == '__init__.py':
+                research_func = reverse_fun
+                logging.info(f"Found research_func: {research_func}")
+
         # Handling user input for loading more items or quitting
         if self.slice_end < total_items:
-            self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")
+            self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]for next page, [red]'q' [green]to quit, or [red]'back' [green]to search.")

             if not force_int_input:
                 key = Prompt.ask(
@@ -113,7 +124,7 @@ class TVShowManager:

             else:
                 choices = [str(i) for i in range(0, max_int_input)]
-                choices.extend(["q", ""])
+                choices.extend(["q", "", "back"])

                 key = Prompt.ask("[cyan]Insert media [red]index", choices=choices, show_choices=False)
                 last_command = key
@@ -127,22 +138,62 @@ class TVShowManager:
             if self.slice_end > total_items:
                 self.slice_end = total_items

+            elif key.lower() == "back" and research_func:
+                try:
+                    # Find the project root directory
+                    current_path = research_func['folder']
+                    while not os.path.exists(os.path.join(current_path, 'StreamingCommunity')):
+                        current_path = os.path.dirname(current_path)
+
+                    # Add project root to Python path
+                    project_root = current_path
+                    #print(f"[DEBUG] Project Root: {project_root}")
+
+                    if project_root not in sys.path:
+                        sys.path.insert(0, project_root)
+
+                    # Import using full absolute import
+                    module_path = 'StreamingCommunity.Api.Site.streamingcommunity'
+                    #print(f"[DEBUG] Importing module: {module_path}")
+
+                    # Import the module
+                    module = importlib.import_module(module_path)
+
+                    # Get the search function
+                    search_func = getattr(module, 'media_search_manager')
+
+                    # Ask for search string
+                    string_to_search = Prompt.ask(f"\n[purple]Insert word to search in [red]{research_func['folder_base']}").strip()
+
+                    # Call the search function with the search string
+                    search_func(string_to_search)
+
+                except Exception as e:
+                    self.console.print(f"[red]Error during search: {e}")
+
+                    # Print detailed traceback
+                    import traceback
+                    traceback.print_exc()
+
+                    # Optionally remove the path if you want to clean up
+                    if project_root in sys.path:
+                        sys.path.remove(project_root)
+
             else:
                 break

         else:
             # Last slice, ensure all remaining items are shown
-            self.console.print(f"\n\n[yellow][INFO] [red]You've reached the end. [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")
+            self.console.print(f"\n\n[yellow][INFO] [green]You've reached the end. [red]Enter [green]for first page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
             if not force_int_input:
                 key = Prompt.ask(
                     "\n[cyan]Insert media index [yellow](e.g., 1), [red]* [cyan]to download all media, "
                     "[yellow](e.g., 1-2) [cyan]for a range of media, or [yellow](e.g., 3-*) [cyan]to download from a specific index to the end"
                 )

             else:
                 choices = [str(i) for i in range(0, max_int_input)]
-                choices.extend(["q", ""])
+                choices.extend(["q", "", "back"])

                 key = Prompt.ask("[cyan]Insert media [red]index", choices=choices, show_choices=False)
                 last_command = key
@@ -154,10 +205,51 @@ class TVShowManager:
                 self.slice_start = 0
                 self.slice_end = self.step

+            elif key.lower() == "back" and research_func:
+                try:
+                    # Find the project root directory
+                    current_path = research_func['folder']
+                    while not os.path.exists(os.path.join(current_path, 'StreamingCommunity')):
+                        current_path = os.path.dirname(current_path)
+
+                    # Add project root to Python path
+                    project_root = current_path
+                    #print(f"[DEBUG] Project Root: {project_root}")
+
+                    if project_root not in sys.path:
+                        sys.path.insert(0, project_root)
+
+                    # Import using full absolute import
+                    module_path = 'StreamingCommunity.Api.Site.streamingcommunity'
+                    #print(f"[DEBUG] Importing module: {module_path}")
+
+                    # Import the module
+                    module = importlib.import_module(module_path)
+
+                    # Get the search function
+                    search_func = getattr(module, 'media_search_manager')
+
+                    # Ask for search string
+                    string_to_search = Prompt.ask(f"\n[purple]Insert word to search in [red]{research_func['folder_base']}").strip()
+
+                    # Call the search function with the search string
+                    search_func(string_to_search)
+
+                except Exception as e:
+                    self.console.print(f"[red]Error during search: {e}")
+
+                    # Print detailed traceback
+                    import traceback
+                    traceback.print_exc()
+
+                    # Optionally remove the path if you want to clean up
+                    if project_root in sys.path:
+                        sys.path.remove(project_root)
+
             else:
                 break

         return last_command

     def clear(self):
         self.tv_shows = []
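The 'back' branch above is pasted verbatim into both pagination arms; a possible extraction into a single method (a sketch, not part of the commit):

    def run_back_search(self, research_func):
        # Mirrors the duplicated 'back' branch: re-import the site package
        # and re-run its search prompt.
        import importlib
        module = importlib.import_module('StreamingCommunity.Api.Site.streamingcommunity')
        search_func = getattr(module, 'media_search_manager')
        string_to_search = Prompt.ask(
            f"\n[purple]Insert word to search in [red]{research_func['folder_base']}"
        ).strip()
        search_func(string_to_search)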
@@ -3,8 +3,7 @@ import axios from 'axios';
 import { Container, Button, Form, InputGroup } from 'react-bootstrap';

 import SearchBar from './SearchBar.js';
-
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL } from './ApiUrl.js';

 const Dashboard = () => {
   const [items, setItems] = useState([]);
@@ -15,7 +14,7 @@ const Dashboard = () => {

   const fetchItems = async (filter = '') => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/api/items?filter=${filter}`);
+      const response = await axios.get(`${API_URL}/items?filter=${filter}`);
       setItems(response.data);
     } catch (error) {
       console.error("Error fetching items:", error);
@@ -4,7 +4,7 @@ import { Container, Row, Col, Card, Button, Badge, Modal } from 'react-bootstrap
 import { FaTrash, FaPlay } from 'react-icons/fa';
 import { Link } from 'react-router-dom';

-const API_BASE_URL = "http://127.0.0.1:1234";
+import { SERVER_PATH_URL, SERVER_DELETE_URL, API_URL } from './ApiUrl';

 const Downloads = () => {
   const [downloads, setDownloads] = useState([]);
@@ -15,7 +15,7 @@ const Downloads = () => {
   // Fetch all downloads
   const fetchDownloads = async () => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/downloads`);
+      const response = await axios.get(`${SERVER_PATH_URL}/get`);
       setDownloads(response.data);
       setLoading(false);
     } catch (error) {
@@ -27,7 +27,7 @@ const Downloads = () => {
   // Delete a TV episode
   const handleDeleteEpisode = async (id, season, episode) => {
     try {
-      await axios.delete(`${API_BASE_URL}/deleteEpisode`, {
+      await axios.delete(`${SERVER_DELETE_URL}/episode`, {
         params: { id, season, episode }
       });
       fetchDownloads(); // Refresh the list
@@ -39,7 +39,7 @@ const Downloads = () => {
   // Delete a movie
   const handleDeleteMovie = async (id) => {
     try {
-      await axios.delete(`${API_BASE_URL}/deleteMovie`, {
+      await axios.delete(`${SERVER_DELETE_URL}/movie`, {
         params: { id }
       });
       fetchDownloads(); // Refresh the list
@@ -50,13 +50,16 @@ const Downloads = () => {

   // Watch video
   const handleWatchVideo = (videoPath) => {
+    console.log("Video path received:", videoPath); // Check the value of videoPath
     setCurrentVideo(videoPath);
     setShowPlayer(true);
   };


   // Initial fetch of downloads
   useEffect(() => {
     fetchDownloads();
+    console.log("Downloads fetched:", downloads);
   }, []);

   if (loading) {
@@ -107,7 +110,7 @@ const Downloads = () => {
               <Button
                 variant="primary"
                 size="sm"
-                onClick={() => handleWatchVideo(movie.path)}
+                onClick={() => handleWatchVideo(movie.path || movie.videoUrl)} // Use the correct field
               >
                 <FaPlay className="me-2" /> Watch
               </Button>
@@ -180,12 +183,12 @@ const Downloads = () => {
       {/* Modal Video Player */}
       <Modal show={showPlayer} onHide={() => setShowPlayer(false)} size="lg" centered>
         <Modal.Body>
           <video
-            src={`http://127.0.0.1:1234/downloaded/${currentVideo}`}
+            src={`${API_URL}/downloaded/${currentVideo}`}
             controls
             autoPlay
             style={{ width: '100%' }}
           />
         </Modal.Body>
       </Modal>
     </Container>
@@ -1,5 +1,5 @@
 import React, { useState } from 'react';
-import PropTypes from 'prop-types'; // Add this import
+import PropTypes from 'prop-types';
 import { useNavigate } from 'react-router-dom';
 import { Form, InputGroup, Button } from 'react-bootstrap';
 import { FaSearch } from 'react-icons/fa';
@@ -38,11 +38,8 @@ const SearchBar = ({ onSearch }) => {
   );
 };

-// Add PropTypes validation
 SearchBar.propTypes = {
-  onSearch: PropTypes.func // If onSearch is optional
-  // or
-  // onSearch: PropTypes.func.isRequired // If onSearch is required
+  onSearch: PropTypes.func
 };

 export default SearchBar;
@@ -4,8 +4,7 @@ import axios from 'axios';
 import { Container, Row, Col, Card, Spinner } from 'react-bootstrap';

 import SearchBar from './SearchBar.js';
-
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL } from './ApiUrl.js';

 const SearchResults = () => {
   const [results, setResults] = useState([]);
@@ -20,7 +19,7 @@ const SearchResults = () => {
   const fetchSearchResults = async () => {
     try {
       setLoading(true);
-      const response = await axios.get(`${API_BASE_URL}/api/search`, {
+      const response = await axios.get(`${API_URL}/search`, {
         params: { q: query }
       });
       setResults(response.data);
@@ -6,7 +6,7 @@ import { FaDownload, FaPlay, FaPlus, FaTrash } from 'react-icons/fa';

 import SearchBar from './SearchBar.js';

-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL, SERVER_WATCHLIST_URL, SERVER_PATH_URL } from './ApiUrl.js';

 const TitleDetail = () => {
   const [titleDetails, setTitleDetails] = useState(null);
@@ -27,7 +27,7 @@ const TitleDetail = () => {
       const titleUrl = location.state?.url || location.pathname.split('/title/')[1];

       // Fetch title information
-      const response = await axios.get(`${API_BASE_URL}/api/getInfo`, {
+      const response = await axios.get(`${API_URL}/getInfo`, {
         params: { url: titleUrl }
       });

@@ -59,7 +59,7 @@ const TitleDetail = () => {
   const checkDownloadStatus = async (titleData) => {
     try {
       if (titleData.type === 'movie') {
-        const response = await axios.get(`${API_BASE_URL}/downloads`);
+        const response = await axios.get(`${SERVER_PATH_URL}/get`);
         const downloadedMovie = response.data.find(
           download => download.type === 'movie' && download.slug === titleData.slug
         );
@@ -70,7 +70,7 @@ const TitleDetail = () => {
           }
         });
       } else if (titleData.type === 'tv') {
-        const response = await axios.get(`${API_BASE_URL}/downloads`);
+        const response = await axios.get(`${SERVER_PATH_URL}/get`);
         const downloadedEpisodes = response.data.filter(
           download => download.type === 'tv' && download.slug === titleData.slug
         );
@@ -92,7 +92,7 @@ const TitleDetail = () => {
   // Check watchlist status
   const checkWatchlistStatus = async (slug) => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/api/getWatchlist`);
+      const response = await axios.get(`${SERVER_WATCHLIST_URL}/get`);
       const inWatchlist = response.data.some(item => item.name === slug);
       setIsInWatchlist(inWatchlist);
     } catch (error) {
@@ -104,7 +104,7 @@ const TitleDetail = () => {
     if (titleDetails.type === 'tv') {
       try {
         setLoading(true);
-        const seasonResponse = await axios.get(`${API_BASE_URL}/api/getInfoSeason`, {
+        const seasonResponse = await axios.get(`${API_URL}/getInfoSeason`, {
           params: {
             url: location.state?.url,
             n: seasonNumber
@@ -123,7 +123,7 @@ const TitleDetail = () => {

   const handleDownloadFilm = async () => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/downloadFilm`, {
+      const response = await axios.get(`${API_URL}/download/film`, {
         params: {
           id: titleDetails.id,
           slug: titleDetails.slug
@@ -144,12 +144,14 @@ const TitleDetail = () => {
     }
   };

-  const handleDownloadEpisode = async (seasonNumber, episodeNumber) => {
+  const handleDownloadEpisode = async (seasonNumber, episodeNumber, titleID, titleSlug) => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/downloadEpisode`, {
+      const response = await axios.get(`${API_URL}/download/episode`, {
         params: {
           n_s: seasonNumber,
-          n_ep: episodeNumber
+          n_ep: episodeNumber,
+          titleID: titleID,
+          slug: titleSlug
         }
       });
       const videoPath = response.data.path;
@@ -176,7 +178,7 @@ const TitleDetail = () => {
     try {
       let path;
       if (titleDetails.type === 'movie') {
-        const response = await axios.get(`${API_BASE_URL}/moviePath`, {
+        const response = await axios.get(`${SERVER_PATH_URL}/movie`, {
           params: { id: titleDetails.id }
         });
         path = response.data.path;
@@ -198,21 +200,21 @@ const TitleDetail = () => {

   const handleAddToWatchlist = async () => {
     try {
-      await axios.post(`${API_BASE_URL}/api/addWatchlist`, {
+      await axios.post(`${SERVER_WATCHLIST_URL}/add`, {
         name: titleDetails.slug,
         url: location.state?.url || location.pathname.split('/title/')[1],
-        season: titleDetails.season_count
+        season: titleDetails.season_count // Changed 'season_count' to 'season'
       });
       setIsInWatchlist(true);
     } catch (error) {
       console.error("Error adding to watchlist:", error);
       alert("Error adding to watchlist. Please try again.");
     }
   };

   const handleRemoveFromWatchlist = async () => {
     try {
-      await axios.post(`${API_BASE_URL}/api/removeWatchlist`, {
+      await axios.post(`${SERVER_WATCHLIST_URL}/remove`, {
         name: titleDetails.slug
       });
       setIsInWatchlist(false);
@@ -375,7 +377,7 @@ const TitleDetail = () => {
                     ) : (
                       <Button
                         variant="primary"
-                        onClick={() => handleDownloadEpisode(selectedSeason, episode.number)}
+                        onClick={() => handleDownloadEpisode(selectedSeason, episode.number, titleDetails.id, titleDetails.slug)}
                       >
                         <FaDownload className="me-2" /> Download
                       </Button>
@@ -393,7 +395,7 @@ const TitleDetail = () => {
       <Modal show={showPlayer} onHide={() => setShowPlayer(false)} size="lg" centered>
         <Modal.Body>
           <video
-            src={`http://127.0.0.1:1234/downloaded/${currentVideo}`}
+            src={`${API_URL}/downloaded/${currentVideo}`}
             controls
             autoPlay
             style={{ width: '100%' }}
@@ -4,7 +4,7 @@ import { Container, Row, Col, Card, Button, Badge, Alert } from 'react-bootstrap
 import { Link } from 'react-router-dom';
 import { FaTrash } from 'react-icons/fa';

-const API_BASE_URL = "http://127.0.0.1:1234";
+import { SERVER_WATCHLIST_URL } from './ApiUrl';

 const Watchlist = () => {
   const [watchlistItems, setWatchlistItems] = useState([]);
@@ -15,7 +15,7 @@ const Watchlist = () => {
   // Function to fetch the watchlist data
   const fetchWatchlistData = async () => {
     try {
-      const watchlistResponse = await axios.get(`${API_BASE_URL}/api/getWatchlist`);
+      const watchlistResponse = await axios.get(`${SERVER_WATCHLIST_URL}/get`);
       setWatchlistItems(watchlistResponse.data);
       setLoading(false);
     } catch (error) {
@@ -27,7 +27,7 @@ const Watchlist = () => {
   // Function to check whether there are new seasons (triggered by the button)
   const checkNewSeasons = async () => {
     try {
-      const newSeasonsResponse = await axios.get(`${API_BASE_URL}/api/checkWatchlist`);
+      const newSeasonsResponse = await axios.get(`${SERVER_WATCHLIST_URL}/check`);

       if (Array.isArray(newSeasonsResponse.data)) {
         setNewSeasons(newSeasonsResponse.data);
@@ -58,7 +58,7 @@ const Watchlist = () => {
       // Send a POST request for each title that has new seasons
       console.log(`Updated watchlist for ${season.name} with new season ${season.nNewSeason}, url: ${season.title_url}`);

-      await axios.post(`${API_BASE_URL}/api/updateTitleWatchlist`, {
+      await axios.post(`${SERVER_WATCHLIST_URL}/update`, {
         url: season.title_url,
         season: season.season
       });
@@ -72,15 +72,17 @@ const Watchlist = () => {
   // Function to remove an item from the watchlist
   const handleRemoveFromWatchlist = async (serieName) => {
     try {
-      await axios.post(`${API_BASE_URL}/api/removeWatchlist`, { name: serieName });
-
-      // Update local state to remove the item from the watchlist
+      await axios.post(`${SERVER_WATCHLIST_URL}/remove`, {
+        name: serieName
+      });

       setWatchlistItems((prev) => prev.filter((item) => item.name !== serieName));
     } catch (error) {
       console.error("Error removing from watchlist:", error);
     }
   };


   // Initially load the watchlist
   useEffect(() => {
     fetchWatchlistData();
config.json (12 lines changed)
@@ -2,9 +2,9 @@
   "DEFAULT": {
     "debug": false,
     "log_file": "app.log",
-    "log_to_file": true,
-    "show_message": true,
-    "clean_console": true,
+    "log_to_file": false,
+    "show_message": false,
+    "clean_console": false,
     "root_path": "Video",
     "movie_folder_name": "Movie",
     "serie_folder_name": "TV",
@@ -24,8 +24,8 @@
     "download_video": true,
     "download_audio": true,
     "merge_audio": true,
-    "default_video_workser": 12,
-    "default_audio_workser": 12,
+    "default_video_workser": 8,
+    "default_audio_workser": 8,
     "specific_list_audio": [
       "ita"
     ],
@@ -55,7 +55,7 @@
     }
   },
   "EXTRA": {
-    "mongodb": "mongodb+srv://..",
+    "mongodb": "mongodb+srv://...",
     "database": "StreamingCommunity"
   }
 }
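These keys are consumed through config_manager elsewhere in this commit; for instance, with section and key names exactly as they appear in the hunks above:

    from StreamingCommunity.Util._jsonConfig import config_manager

    max_timeout = config_manager.get_int("REQUESTS", "timeout")
    use_headless = config_manager.get_bool("BROWSER", "headless")
    site_domain = config_manager.get_dict("SITE", "streamingcommunity")['domain']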
718
server.py
718
server.py
@ -1,19 +1,30 @@
|
||||
# 13.12.24
|
||||
|
||||
import os
|
||||
import logging
|
||||
#logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
||||
import datetime
|
||||
from urllib.parse import urlparse
|
||||
from urllib.parse import unquote
|
||||
from urllib.parse import urlparse, unquote
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# External
|
||||
import uvicorn
|
||||
from rich.console import Console
|
||||
from pymongo import MongoClient
|
||||
from flask_cors import CORS
|
||||
from flask import Flask, jsonify, request
|
||||
from flask import send_from_directory
|
||||
from fastapi import FastAPI, HTTPException, Query
|
||||
from fastapi.responses import FileResponse
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
|
||||
# Util
|
||||
from StreamingCommunity.Util.os import os_summary
|
||||
os_summary.get_system_summary()
|
||||
from StreamingCommunity.Util.logger import Logger
|
||||
log = Logger()
|
||||
from StreamingCommunity.Util._jsonConfig import config_manager
|
||||
from server_type import WatchlistItem, UpdateWatchlistItem
|
||||
from server_util import updateUrl
|
||||
|
||||
|
||||
# Internal
|
||||
@ -23,19 +34,30 @@ from StreamingCommunity.Api.Site.streamingcommunity.film import download_film
|
||||
from StreamingCommunity.Api.Site.streamingcommunity.series import download_video
|
||||
from StreamingCommunity.Api.Site.streamingcommunity.util.ScrapeSerie import ScrapeSerie
|
||||
|
||||
|
||||
# Player
|
||||
from StreamingCommunity.Api.Player.vixcloud import VideoSource
|
||||
|
||||
|
||||
# Variable
|
||||
app = Flask(__name__)
|
||||
CORS(app)
|
||||
app = FastAPI()
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"]
|
||||
)
|
||||
|
||||
|
||||
# Site variable
|
||||
version, domain = get_version_and_domain()
|
||||
season_name = None
|
||||
scrape_serie = ScrapeSerie("streamingcommunity")
|
||||
video_source = VideoSource("streamingcommunity", True)
|
||||
|
||||
DOWNLOAD_DIRECTORY = os.getcwd()
|
||||
console = Console()
|
||||
|
||||
|
||||
# Mongo variable
|
||||
@ -47,179 +69,134 @@ downloads_collection = db['downloads']
|
||||
|
||||
|
||||
# ---------- SITE API ------------
|
||||
@app.route('/')
|
||||
def index():
|
||||
"""
|
||||
Health check endpoint to confirm server is operational.
|
||||
|
||||
Returns:
|
||||
str: Operational status message
|
||||
"""
|
||||
@app.get("/", summary="Health Check")
|
||||
async def index():
|
||||
logging.info("Health check endpoint accessed")
|
||||
return 'Server is operational'
|
||||
return "Server is operational"
|
||||
|
||||
@app.route('/api/search', methods=['GET'])
|
||||
def get_list_search():
|
||||
"""
|
||||
Search for titles based on query parameter.
|
||||
|
||||
Returns:
|
||||
JSON response with search results or error message
|
||||
"""
|
||||
@app.get("/api/search")
|
||||
async def get_list_search(q: Optional[str] = Query(None)):
|
||||
if not q:
|
||||
logging.warning("Search request without query parameter")
|
||||
raise HTTPException(status_code=400, detail="Missing query parameter")
|
||||
try:
|
||||
query = request.args.get('q')
|
||||
|
||||
if not query:
|
||||
logging.warning("Search request without query parameter")
|
||||
return jsonify({'error': 'Missing query parameter'}), 400
|
||||
|
||||
result = search_titles(query, domain)
|
||||
logging.info(f"Search performed for query: {query}")
|
||||
return jsonify(result), 200
|
||||
|
||||
result = search_titles(q, domain)
|
||||
logging.info(f"Search performed for query: {q}")
|
||||
return result
|
||||
except Exception as e:
|
||||
logging.error(f"Error in search: {str(e)}", exc_info=True)
|
||||
return jsonify({'error': 'Internal server error'}), 500
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@app.route('/api/getInfo', methods=['GET'])
def get_info_title():
    """
    Retrieve information for a specific title.
@app.get("/api/getInfo")
async def get_info_title(url: Optional[str] = Query(None)):
    if not url or "http" not in url:
        logging.warning("GetInfo request without URL parameter")
        raise HTTPException(status_code=400, detail="Missing URL parameter")

    Returns:
        JSON response with title information or error message
    """
    try:
        title_url = request.args.get('url')

        if not title_url:
            logging.warning("GetInfo request without URL parameter")
            return jsonify({'error': 'Missing URL parameter'}), 400

        result = get_infoSelectTitle(title_url, domain, version)

        result = get_infoSelectTitle(url, domain, version)

        if result.get('type') == "tv":
            global season_name, scrape_serie, video_source

            season_name = result.get('slug')

            scrape_serie.setup(
                version=version,
                media_id=int(result.get('id')),
                series_name=result.get('slug')
            )

            video_source.setup(result.get('id'))

            logging.info(f"TV series info retrieved: {season_name}")

        return jsonify(result), 200

        return result

    except Exception as e:
        logging.error(f"Error retrieving title info: {str(e)}", exc_info=True)
        return jsonify({'error': 'Failed to retrieve title information'}), 500

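Review note: season_name, scrape_serie and video_source are module-level globals mutated here per request, so two overlapping /api/getInfo calls can overwrite each other's state before a later /api/download/episode runs. A request-scoped alternative (a sketch, not part of this commit; build_scraper is a hypothetical helper):

def build_scraper(result: dict) -> ScrapeSerie:
    # Create a fresh scraper per request instead of mutating the shared instance.
    scraper = ScrapeSerie("streamingcommunity")
    scraper.setup(
        version=version,
        media_id=int(result.get('id')),
        series_name=result.get('slug')
    )
    return scraper
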
@app.route('/api/getInfoSeason', methods=['GET'])
def get_info_season():
    """
    Retrieve season information for a specific title.
@app.get("/api/getInfoSeason")
async def get_info_season(url: Optional[str] = Query(None), n: Optional[str] = Query(None)):
    if not url or not n:
        logging.warning("GetInfoSeason request with missing parameters")
        raise HTTPException(status_code=400, detail="Missing URL or season number")

    Returns:
        JSON response with season information or error message
    """
    try:
        title_url = request.args.get('url')
        number_season = request.args.get('n')

        if not title_url or not number_season:
            logging.warning("GetInfoSeason request with missing parameters")
            return jsonify({'error': 'Missing URL or season number'}), 400

        result = get_infoSelectSeason(title_url, number_season, domain, version)
        logging.info(f"Season info retrieved for season {number_season}")
        return jsonify(result), 200
        result = get_infoSelectSeason(url, n, domain, version)
        logging.info(f"Season info retrieved for season {n}")
        return result

    except Exception as e:
        logging.error(f"Error retrieving season info: {str(e)}", exc_info=True)
        return jsonify({'error': 'Failed to retrieve season information'}), 500
        raise HTTPException(status_code=500, detail="Failed to retrieve season information")

@app.route('/api/getdomain', methods=['GET'])
def get_domain():
    """
    Retrieve current domain and version.

    Returns:
        JSON response with domain and version
    """
@app.get("/api/getdomain")
async def get_domain():
    try:
        global version, domain
        version, domain = get_version_and_domain()
        logging.info(f"Domain retrieved: {domain}, Version: {version}")
        return jsonify({'domain': domain, 'version': version}), 200

        return {"domain": domain, "version": version}

    except Exception as e:
        logging.error(f"Error retrieving domain: {str(e)}", exc_info=True)
        return jsonify({'error': 'Failed to retrieve domain information'}), 500

        raise HTTPException(status_code=500, detail="Failed to retrieve domain information")


# ---------- DOWNLOAD API ------------
@app.route('/downloadFilm', methods=['GET'])
def call_download_film():
    """
    Download a film by its ID and slug.
@app.get("/api/download/film")
async def call_download_film(id: Optional[str] = Query(None), slug: Optional[str] = Query(None)):
    if not id or not slug:
        logging.warning("Download film request with missing parameters")
        raise HTTPException(status_code=400, detail="Missing film ID or slug")

    Returns:
        JSON response with download path or error message
    """
    try:
        film_id = request.args.get('id')
        slug = request.args.get('slug')

        if not film_id or not slug:
            logging.warning("Download film request with missing parameters")
            return jsonify({'error': 'Missing film ID or slug'}), 400

        item_media = MediaItem(**{'id': film_id, 'slug': slug})
        item_media = MediaItem(**{'id': id, 'slug': slug})
        path_download = download_film(item_media)

        download_data = {
            'type': 'movie',
            'id': film_id,
            'id': id,
            'slug': slug,
            'path': path_download,
            'timestamp': datetime.datetime.now(datetime.timezone.utc)
        }
        downloads_collection.insert_one(download_data)

        logging.info(f"Film downloaded: {slug}")
        return jsonify({'path': path_download}), 200
        return {"path": path_download}

    except Exception as e:
        logging.error(f"Error downloading film: {str(e)}", exc_info=True)
        return jsonify({'error': 'Failed to download film'}), 500
        raise HTTPException(status_code=500, detail="Failed to download film")

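Review note: if download_film is synchronous (it appears to be), running it inside an async def handler stalls the event loop for the entire download. One possible mitigation, not part of this commit, is FastAPI's thread-pool helper (a sketch; the route name is hypothetical, for illustration only):

from fastapi.concurrency import run_in_threadpool

@app.get("/api/download/film_threaded")  # hypothetical route
async def call_download_film_threaded(id: str, slug: str):
    item_media = MediaItem(**{'id': id, 'slug': slug})
    # The blocking download runs in a worker thread; other requests keep flowing.
    path_download = await run_in_threadpool(download_film, item_media)
    return {"path": path_download}
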
@app.route('/downloadEpisode', methods=['GET'])
def call_download_episode():
    """
    Download a specific TV series episode.
@app.get("/api/download/episode")
async def call_download_episode(n_s: Optional[int] = Query(None), n_ep: Optional[int] = Query(None), titleID: Optional[int] = Query(None), slug: Optional[str] = Query(None)):
    global scrape_serie

    if not n_s or not n_ep:
        logging.warning("Download episode request with missing parameters")
        raise HTTPException(status_code=400, detail="Missing season or episode number")

    Returns:
        JSON response with download path or error message
    """
    try:
        season_number = request.args.get('n_s')
        episode_number = request.args.get('n_ep')

        if not season_number or not episode_number:
            logging.warning("Download episode request with missing parameters")
            return jsonify({'error': 'Missing season or episode number'}), 400

        season_number = int(season_number)
        episode_number = int(episode_number)

        scrape_serie.collect_title_season(season_number)

        scrape_serie.setup(
            version=version,
            media_id=int(titleID),
            series_name=slug
        )
        video_source.setup(int(titleID))

        scrape_serie.collect_info_title()
        scrape_serie.collect_info_season(n_s)

        path_download = download_video(
            season_name,
            season_number,
            episode_number,
            n_s,
            n_ep,
            scrape_serie,
            video_source
        )
@ -228,178 +205,151 @@ def call_download_episode():
            'type': 'tv',
            'id': scrape_serie.media_id,
            'slug': scrape_serie.series_name,
            'n_s': season_number,
            'n_ep': episode_number,
            'n_s': n_s,
            'n_ep': n_ep,
            'path': path_download,
            'timestamp': datetime.datetime.now(datetime.timezone.utc)
        }

        downloads_collection.insert_one(download_data)

        logging.info(f"Episode downloaded: S{season_number}E{episode_number}")
        return jsonify({'path': path_download}), 200

    except ValueError:
        logging.error("Invalid season or episode number format")
        return jsonify({'error': 'Invalid season or episode number'}), 400

        logging.info(f"Episode downloaded: S{n_s}E{n_ep}")
        return {"path": path_download}

    except Exception as e:
        logging.error(f"Error downloading episode: {str(e)}", exc_info=True)
        return jsonify({'error': 'Failed to download episode'}), 500
        raise HTTPException(status_code=500, detail="Failed to download episode")

@app.route('/downloaded/<path:filename>', methods=['GET'])
def serve_downloaded_file(filename):
    """
    Serve downloaded files with proper URL decoding and error handling.

    Returns:
        Downloaded file or error message
    """
@app.get("/api/downloaded/{filename:path}")
async def serve_downloaded_file(filename: str):
    try:
        # URL decode the filename
        # Decode the file name
        decoded_filename = unquote(filename)
        logging.debug(f"Requested file: {decoded_filename}")

        # Construct full file path
        file_path = os.path.join(DOWNLOAD_DIRECTORY, decoded_filename)
        logging.debug(f"Full file path: {file_path}")

        # Verify file exists
        logging.info(f"Decoded filename: {decoded_filename}")

        # Normalize the path
        file_path = os.path.normpath(os.path.join(DOWNLOAD_DIRECTORY, decoded_filename))

        # Make sure the file stays inside the download directory
        if not file_path.startswith(os.path.abspath(DOWNLOAD_DIRECTORY)):
            logging.error(f"Path traversal attempt detected: {file_path}")
            raise HTTPException(status_code=400, detail="Invalid file path")

        # Verify the file exists
        if not os.path.isfile(file_path):
            logging.warning(f"File not found: {decoded_filename}")
            return jsonify({'error': 'File not found'}), 404

        # Serve the file
        return send_from_directory(DOWNLOAD_DIRECTORY, decoded_filename, as_attachment=False)

            logging.error(f"File not found: {file_path}")
            raise HTTPException(status_code=404, detail="File not found")

        # Return the file
        return FileResponse(file_path)
    except Exception as e:
        logging.error(f"Error serving file: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500
        raise HTTPException(status_code=500, detail="Internal server error")


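Review note: the startswith guard above blocks the usual ../ traversal, but a plain prefix comparison also accepts sibling directories that merely share the prefix (a DOWNLOAD_DIRECTORY named downloads would accept downloads_evil). A slightly stricter variant (a sketch; is_inside_directory is a hypothetical helper):

import os

def is_inside_directory(base: str, candidate: str) -> bool:
    # commonpath compares whole path components, so "downloads_evil"
    # is no longer treated as a child of "downloads".
    base = os.path.abspath(base)
    candidate = os.path.abspath(candidate)
    return os.path.commonpath([base, candidate]) == base
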
# ---------- WATCHLIST UTIL MONGO ------------
@app.post("/server/watchlist/add")
async def add_to_watchlist(item: WatchlistItem):
    existing_item = watchlist_collection.find_one({
        'name': item.name,
        'url': item.url,
        'season': item.season
    })

# ---------- WATCHLIST MONGO ------------
@app.route('/api/addWatchlist', methods=['POST'])
def add_to_watchlist():
    title_name = request.json.get('name')
    title_url = request.json.get('url')
    season = request.json.get('season')
    if existing_item:
        logging.warning(f"Item already in watchlist: {item.name}")
        raise HTTPException(status_code=400, detail="Il titolo è già nella watchlist")

    if title_url and season:
    watchlist_collection.insert_one({
        'name': item.name,
        'title_url': item.url,
        'season': item.season,
        'added_on': datetime.datetime.utcnow()
    })

        existing_item = watchlist_collection.find_one({'name': title_name, 'url': title_url, 'season': season})
        if existing_item:
            return jsonify({'message': 'Il titolo è già nella watchlist'}), 400
    logging.info(f"Added to watchlist: {item.name}")
    return {"message": "Titolo aggiunto alla watchlist"}

        watchlist_collection.insert_one({
            'name': title_name,
            'title_url': title_url,
            'season': season,
            'added_on': datetime.datetime.utcnow()
        })
        return jsonify({'message': 'Titolo aggiunto alla watchlist'}), 200
    else:
        return jsonify({'message': 'Missing title_url or season'}), 400
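Review note: the new handler receives a validated WatchlistItem instead of reading request.json by hand, so a malformed body is rejected with a 422 before the function body runs. Example request against the new route (a sketch; httpx is just one possible client, and the title data is made up):

import httpx

payload = {
    "name": "example-title",                      # required field
    "url": "https://example.org/titles/example",  # optional
    "season": 1                                   # optional
}
response = httpx.post("http://127.0.0.1:1234/server/watchlist/add", json=payload)
print(response.status_code, response.json())
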
@app.post("/server/watchlist/update")
|
||||
async def update_title_watchlist(update: UpdateWatchlistItem):
|
||||
result = watchlist_collection.update_one(
|
||||
{'title_url': update.url},
|
||||
{'$set': {'season': update.season}}
|
||||
)
|
||||
|
||||
@app.route('/api/updateTitleWatchlist', methods=['POST'])
|
||||
def update_title_watchlist():
|
||||
print(request.json)
|
||||
|
||||
title_url = request.json.get('url')
|
||||
new_season = request.json.get('season')
|
||||
|
||||
if title_url is not None and new_season is not None:
|
||||
result = watchlist_collection.update_one(
|
||||
{'title_url': title_url},
|
||||
{'$set': {'season': new_season}}
|
||||
)
|
||||
|
||||
if result.matched_count == 0:
|
||||
return jsonify({'message': 'Titolo non trovato nella watchlist'}), 404
|
||||
|
||||
if result.modified_count == 0:
|
||||
return jsonify({'message': 'La stagione non è cambiata'}), 200
|
||||
|
||||
return jsonify({'message': 'Stagione aggiornata con successo'}), 200
|
||||
if result.matched_count == 0:
|
||||
logging.warning(f"Item not found for update: {update.url}")
|
||||
raise HTTPException(status_code=404, detail="Titolo non trovato nella watchlist")
|
||||
|
||||
else:
|
||||
return jsonify({'message': 'Missing title_url or season'}), 400
|
||||
|
||||
@app.route('/api/removeWatchlist', methods=['POST'])
|
||||
def remove_from_watchlist():
|
||||
title_name = request.json.get('name')
|
||||
if result.modified_count == 0:
|
||||
logging.info(f"Season unchanged for: {update.url}")
|
||||
return {"message": "La stagione non è cambiata"}
|
||||
|
||||
if title_name:
|
||||
result = watchlist_collection.delete_one({'name': title_name})
|
||||
logging.info(f"Updated season for: {update.url}")
|
||||
return {"message": "Stagione aggiornata con successo"}
|
||||
|
||||
if result.deleted_count == 1:
|
||||
return jsonify({'message': 'Titolo rimosso dalla watchlist'}), 200
|
||||
else:
|
||||
return jsonify({'message': 'Titolo non trovato nella watchlist'}), 404
|
||||
else:
|
||||
return jsonify({'message': 'Missing title_url or season'}), 400
|
||||
|
||||
@app.route('/api/getWatchlist', methods=['GET'])
def get_watchlist():
    watchlist_items = list(watchlist_collection.find({}, {'_id': 0}))
@app.post("/server/watchlist/remove")
async def remove_from_watchlist(item: WatchlistItem):
    # You can handle just the 'name' field here
    result = watchlist_collection.delete_one({'name': item.name})

    if watchlist_items:
        return jsonify(watchlist_items), 200
    else:
        return jsonify({'message': 'La watchlist è vuota'}), 200
    if result.deleted_count == 0:
        logging.warning(f"Item not found for removal: {item.name}")
        raise HTTPException(status_code=404, detail="Titolo non trovato nella watchlist")

@app.route('/api/checkWatchlist', methods=['GET'])
def get_newSeason():
    title_newSeasons = []
    logging.info(f"Successfully removed from watchlist: {item.name}")
    return {"message": "Titolo rimosso dalla watchlist"}

@app.get("/server/watchlist/get")
async def get_watchlist():
    watchlist_items = list(watchlist_collection.find({}, {'_id': 0}))

    if not watchlist_items:
        return jsonify({'message': 'La watchlist è vuota'}), 200
        logging.info("Watchlist is empty")
        return {"message": "La watchlist è vuota"}

    logging.info("Watchlist retrieved")
    return watchlist_items

@app.get("/server/watchlist/check")
|
||||
async def get_new_season():
|
||||
title_new_seasons = []
|
||||
watchlist_items = list(watchlist_collection.find({}, {'_id': 0}))
|
||||
logging.error("GET: ", watchlist_items)
|
||||
|
||||
if not watchlist_items:
|
||||
logging.info("Watchlist is empty")
|
||||
return {"message": "La watchlist è vuota"}
|
||||
|
||||
for item in watchlist_items:
|
||||
title_url = item.get('title_url')
|
||||
if not title_url:
|
||||
continue
|
||||
|
||||
try:
|
||||
parsed_url = urlparse(title_url)
|
||||
hostname = parsed_url.hostname
|
||||
domain_part = hostname.split('.')[1]
|
||||
new_url = title_url.replace(domain_part, domain)
|
||||
new_url = updateUrl(item['title_url'], domain)
|
||||
|
||||
result = get_infoSelectTitle(new_url, domain, version)
|
||||
|
||||
if not result or 'season_count' not in result:
|
||||
continue
|
||||
continue
|
||||
|
||||
number_season = result.get("season_count")
|
||||
|
||||
if number_season > item.get("season"):
|
||||
title_newSeasons.append({
|
||||
'title_url': item.get('title_url'),
|
||||
'name': item.get('name'),
|
||||
'season': int(number_season),
|
||||
'nNewSeason': int(number_season) - int(item.get("season"))
|
||||
title_new_seasons.append({
|
||||
'title_url': item['title_url'],
|
||||
'name': item['name'],
|
||||
'season': number_season,
|
||||
'nNewSeason': number_season - item['season']
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
print(f"Errore nel recuperare informazioni per {item.get('title_url')}: {e}")
|
||||
logging.error(f"Error checking new season for {item['title_url']}: {e}")
|
||||
|
||||
if title_newSeasons:
|
||||
return jsonify(title_newSeasons), 200
|
||||
else:
|
||||
return jsonify({'message': 'Nessuna nuova stagione disponibile'}), 200
|
||||
if title_new_seasons:
|
||||
logging.info(f"New seasons found: {len(title_new_seasons)}")
|
||||
return title_new_seasons
|
||||
|
||||
return {"message": "Nessuna nuova stagione disponibile"}
|
||||
|
||||
|
||||
|
||||
# ---------- DOWNLOAD MONGO ------------
# ---------- DOWNLOAD UTIL MONGO ------------
def ensure_collections_exist(db):
    """
    Ensures that the required collections exist in the database.
    If they do not exist, they are created.

    Args:
        db: The MongoDB database object.
    """
    required_collections = ['watchlist', 'downloads']
    existing_collections = db.list_collection_names()

@ -411,190 +361,122 @@ def ensure_collections_exist(db):
    else:
        logging.info(f"Collection already exists: {collection_name}")

@app.route('/downloads', methods=['GET'])
def fetch_all_downloads():
    """
    Endpoint to fetch all downloads.
    """

@app.get("/server/path/get")
async def fetch_all_downloads():
    try:
        downloads = list(downloads_collection.find({}, {'_id': 0}))
        return jsonify(downloads), 200
        logging.info("Downloads retrieved")
        return downloads

    except Exception as e:
        logging.error(f"Error fetching all downloads: {str(e)}")
        return []
        logging.error(f"Error fetching downloads: {e}")
        raise HTTPException(status_code=500, detail="Errore nel recupero dei download")

@app.get("/server/path/movie")
async def fetch_movie_path(id: Optional[int] = Query(None)):
    if not id:
        logging.warning("Movie path request without ID parameter")
        raise HTTPException(status_code=400, detail="Missing movie ID")

@app.route('/deleteEpisode', methods=['DELETE'])
def remove_episode():
    """
    Endpoint to delete a specific episode and its file.
    """
    try:
        series_id = request.args.get('id')
        season_number = request.args.get('season')
        episode_number = request.args.get('episode')

        if not series_id or not season_number or not episode_number:
            return jsonify({'error': 'Missing parameters (id, season, episode)'}), 400

        try:
            series_id = int(series_id)
            season_number = int(season_number)
            episode_number = int(episode_number)
        except ValueError:
            return jsonify({'error': 'Invalid season or episode number'}), 400

        # Find the file path
        episode = downloads_collection.find_one({
            'type': 'tv',
            'id': series_id,
            'n_s': season_number,
            'n_ep': episode_number
        }, {'_id': 0, 'path': 1})

        if not episode or 'path' not in episode:
            return jsonify({'error': 'Episode not found'}), 404

        file_path = episode['path']

        # Delete the physical file
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
                logging.info(f"Deleted episode file: {file_path}")
            else:
                logging.warning(f"Episode file not found: {file_path}")
        except Exception as e:
            logging.error(f"Error deleting episode file: {str(e)}")

        # Remove the episode from the database
        result = downloads_collection.delete_one({
            'type': 'tv',
            'id': series_id,
            'n_s': season_number,
            'n_ep': episode_number
        })

        if result.deleted_count > 0:
            return jsonify({'success': True}), 200
        else:
            return jsonify({'error': 'Failed to delete episode from database'}), 500

    except Exception as e:
        logging.error(f"Error deleting episode: {str(e)}")
        return jsonify({'error': 'Failed to delete episode'}), 500

@app.route('/deleteMovie', methods=['DELETE'])
def remove_movie():
    """
    Endpoint to delete a specific movie, its file, and its parent folder if empty.
    """
    try:
        movie_id = request.args.get('id')

        if not movie_id:
            return jsonify({'error': 'Missing movie ID'}), 400

        # Find the file path
        movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})

        if not movie or 'path' not in movie:
            return jsonify({'error': 'Movie not found'}), 404

        file_path = movie['path']
        parent_folder = os.path.dirname(file_path)

        # Delete the physical file
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
                logging.info(f"Deleted movie file: {file_path}")
            else:
                logging.warning(f"Movie file not found: {file_path}")
        except Exception as e:
            logging.error(f"Error deleting movie file: {str(e)}")

        # Delete the parent folder if it is empty
        try:
            if os.path.exists(parent_folder) and not os.listdir(parent_folder):
                os.rmdir(parent_folder)
                logging.info(f"Deleted empty parent folder: {parent_folder}")
        except Exception as e:
            logging.error(f"Error deleting parent folder: {str(e)}")

        # Remove the movie from the database
        result = downloads_collection.delete_one({'type': 'movie', 'id': movie_id})

        if result.deleted_count > 0:
            return jsonify({'success': True}), 200
        else:
            return jsonify({'error': 'Failed to delete movie from database'}), 500

    except Exception as e:
        logging.error(f"Error deleting movie: {str(e)}")
        return jsonify({'error': 'Failed to delete movie'}), 500

@app.route('/moviePath', methods=['GET'])
def fetch_movie_path():
    """
    Endpoint to fetch the path of a specific movie.
    """
    try:
        movie_id = int(request.args.get('id'))

        if not movie_id:
            return jsonify({'error': 'Missing movie ID'}), 400

        movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})
        movie = downloads_collection.find_one(
            {'type': 'movie', 'id': id},
            {'_id': 0, 'path': 1}
        )

        if movie and 'path' in movie:
            return jsonify({'path': movie['path']}), 200
            logging.info(f"Movie path retrieved: {movie['path']}")
            return {"path": movie['path']}

        else:
            return jsonify({'error': 'Movie not found'}), 404

        logging.warning(f"Movie not found: ID {id}")
        raise HTTPException(status_code=404, detail="Movie not found")

    except Exception as e:
        logging.error(f"Error fetching movie path: {str(e)}")
        return jsonify({'error': 'Failed to fetch movie path'}), 500
        logging.error(f"Error fetching movie path: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail="Failed to fetch movie path")

@app.get("/server/path/episode")
|
||||
async def fetch_episode_path(id: Optional[int] = Query(None), season: Optional[int] = Query(None), episode: Optional[int] = Query(None)):
|
||||
if not id or not season or not episode:
|
||||
logging.warning("Episode path request with missing parameters")
|
||||
raise HTTPException(status_code=400, detail="Missing parameters (id, season, episode)")
|
||||
|
||||
@app.route('/episodePath', methods=['GET'])
|
||||
def fetch_episode_path():
|
||||
"""
|
||||
Endpoint to fetch the path of a specific episode.
|
||||
"""
|
||||
try:
|
||||
series_id = request.args.get('id')
|
||||
season_number = request.args.get('season')
|
||||
episode_number = request.args.get('episode')
|
||||
episode_data = downloads_collection.find_one(
|
||||
{'type': 'tv', 'id': id, 'n_s': season, 'n_ep': episode},
|
||||
{'_id': 0, 'path': 1}
|
||||
)
|
||||
|
||||
if not series_id or not season_number or not episode_number:
|
||||
return jsonify({'error': 'Missing parameters (id, season, episode)'}), 400
|
||||
|
||||
try:
|
||||
series_id = int(series_id)
|
||||
season_number = int(season_number)
|
||||
episode_number = int(episode_number)
|
||||
except ValueError:
|
||||
return jsonify({'error': 'Invalid season or episode number'}), 400
|
||||
|
||||
episode = downloads_collection.find_one({
|
||||
'type': 'tv',
|
||||
'id': series_id,
|
||||
'n_s': season_number,
|
||||
'n_ep': episode_number
|
||||
}, {'_id': 0, 'path': 1})
|
||||
|
||||
if episode and 'path' in episode:
|
||||
return jsonify({'path': episode['path']}), 200
|
||||
if episode_data and 'path' in episode_data:
|
||||
logging.info(f"Episode path retrieved: {episode_data['path']}")
|
||||
return {"path": episode_data['path']}
|
||||
|
||||
else:
|
||||
return jsonify({'error': 'Episode not found'}), 404
|
||||
|
||||
logging.warning(f"Episode not found: ID {id}, Season {season}, Episode {episode}")
|
||||
raise HTTPException(status_code=404, detail="Episode not found")
|
||||
|
||||
except Exception as e:
|
||||
logging.error(f"Error fetching episode path: {str(e)}")
|
||||
return jsonify({'error': 'Failed to fetch episode path'}), 500
|
||||
logging.error(f"Error fetching episode path: {str(e)}", exc_info=True)
|
||||
raise HTTPException(status_code=500, detail="Failed to fetch episode path")
|
||||
|
||||
@app.delete("/server/delete/episode")
|
||||
async def remove_episode(series_id: int = Query(...), season_number: int = Query(...), episode_number: int = Query(...)):
|
||||
episode = downloads_collection.find_one({
|
||||
'type': 'tv',
|
||||
'id': series_id,
|
||||
'n_s': season_number,
|
||||
'n_ep': episode_number
|
||||
}, {'_id': 0, 'path': 1})
|
||||
|
||||
if not episode:
|
||||
logging.warning(f"Episode not found: S{season_number}E{episode_number}")
|
||||
raise HTTPException(status_code=404, detail="Episodio non trovato")
|
||||
|
||||
file_path = episode.get('path')
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
logging.info(f"Episode file deleted: {file_path}")
|
||||
|
||||
downloads_collection.delete_one({
|
||||
'type': 'tv',
|
||||
'id': series_id,
|
||||
'n_s': season_number,
|
||||
'n_ep': episode_number
|
||||
})
|
||||
|
||||
return {"success": True}
|
||||
|
||||
@app.delete("/server/delete/movie")
|
||||
async def remove_movie(movie_id: int = Query(...)):
|
||||
movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})
|
||||
|
||||
if not movie:
|
||||
logging.warning(f"Movie not found: ID {movie_id}")
|
||||
raise HTTPException(status_code=404, detail="Film non trovato")
|
||||
|
||||
file_path = movie.get('path')
|
||||
parent_folder = os.path.dirname(file_path)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
logging.info(f"Movie file deleted: {file_path}")
|
||||
|
||||
if os.path.exists(parent_folder) and not os.listdir(parent_folder):
|
||||
os.rmdir(parent_folder)
|
||||
logging.info(f"Parent folder deleted: {parent_folder}")
|
||||
|
||||
downloads_collection.delete_one({'type': 'movie', 'id': movie_id})
|
||||
return {"success": True}
|
||||
|
||||
|
||||
if __name__ == "__main__":


if __name__ == '__main__':
    ensure_collections_exist(db)
    app.run(debug=True, port=1234, threaded=True)
    uvicorn.run(
        "server:app",
        host="127.0.0.1",
        port=1234,
        reload=False
    )
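Review note: since reload=False, the import-string form "server:app" is optional; uvicorn.run also accepts the app object directly (a sketch, assuming this file is server.py; reload and multiple workers do require the import string):

if __name__ == "__main__":
    ensure_collections_exist(db)
    # Equivalent launch passing the app object itself.
    uvicorn.run(app, host="127.0.0.1", port=1234)
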
server_type.py (new file, 23 lines)
@ -0,0 +1,23 @@
# 13.12.24

import datetime
from pydantic import BaseModel
from typing import Optional, Dict, List


class WatchlistItem(BaseModel):
    name: str
    url: Optional[str] = None
    title_url: Optional[str] = None
    season: Optional[int] = None
    added_on: Optional[datetime.datetime] = None

class UpdateWatchlistItem(BaseModel):
    url: Optional[str] = None
    season: Optional[int] = None

class DownloadRequest(BaseModel):
    id: str
    slug: Optional[str] = None
    season: Optional[int] = None
    episode: Optional[int] = None
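Review note: a quick check of how the new models validate (a sketch; model_dump is the pydantic v2 spelling, use .dict() on v1, and the title data is made up):

item = WatchlistItem(name="example-title", season=2)
print(item.model_dump())
# {'name': 'example-title', 'url': None, 'title_url': None, 'season': 2, 'added_on': None}
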
server_util.py (new file, 13 lines)
@ -0,0 +1,13 @@
# 13.12.24

from urllib.parse import urlparse, unquote


def updateUrl(oldUrl: str, domain: str):

    parsed_url = urlparse(oldUrl)
    hostname = parsed_url.hostname
    domain_part = hostname.split('.')[1]
    new_url = oldUrl.replace(domain_part, domain)

    return new_url
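Review note: updateUrl assumes the site URL has the shape https://name.tld/… and swaps the second dot-separated hostname label; note that str.replace would also rewrite that substring anywhere else in the URL. Worked example with made-up domains (a sketch):

# updateUrl("https://streamingcommunity.computer/titles/1-example", "video")
#   hostname    -> "streamingcommunity.computer"
#   domain_part -> "computer"  (the second label)
#   returns        "https://streamingcommunity.video/titles/1-example"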