mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-06 11:35:29 +00:00)

Migrate to fastapi.

This commit is contained in:
  parent 172b09ea46
  commit c093687e9f
@@ -1,17 +1,22 @@
 # 23.11.24
 
-import re
-import logging
 from typing import Dict, Any, List, Union
 
 
 class Episode:
     def __init__(self, data: Dict[str, Any]):
-        self.id: int = data.get('id', '')
-        self.number: int = data.get('number', '')
-        self.name: str = data.get('name', '')
-        self.plot: str = data.get('plot', '')
-        self.duration: int = data.get('duration', '')
+        self.images = None
+        self.data = data
+
+        self.id: int = data.get('id')
+        self.scws_id: int = data.get('scws_id')
+        self.number: int = data.get('number')
+        self.name: str = data.get('name')
+        self.plot: str = data.get('plot')
+        self.duration: int = data.get('duration')
+
+    def collect_image(self, SITE_NAME, domain):
+        self.image = f"https://cdn.{SITE_NAME}.{domain}/images/{self.data.get('images')[0]['filename']}"
 
     def __str__(self):
         return f"Episode(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', duration={self.duration} sec)"
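
Taken together, the new Episode shape reads every field with a plain data.get(...) (defaulting to None rather than ''), keeps the raw payload on self.data, and builds the CDN image URL lazily. A minimal usage sketch; the site name and domain below are placeholder values, not part of this commit:

    # Hypothetical usage of the new Episode API; 'sitename' and 'example' are placeholders.
    data = {'id': 1, 'number': 1, 'name': 'Pilot', 'plot': '...', 'duration': 50,
            'images': [{'filename': 'cover.jpg'}]}
    ep = Episode(data)
    ep.collect_image('sitename', 'example')   # sets ep.image to the cdn.sitename.example URL
    print(ep)
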
@@ -20,7 +25,7 @@ class EpisodeManager:
     def __init__(self):
         self.episodes: List[Episode] = []
 
-    def add_episode(self, episode_data: Dict[str, Any]):
+    def add(self, episode_data: Dict[str, Any]):
         """
         Add a new episode to the manager.
 
@@ -29,8 +34,20 @@ class EpisodeManager:
         """
         episode = Episode(episode_data)
         self.episodes.append(episode)
 
+    def get(self, index: int) -> Episode:
+        """
+        Retrieve an episode by its index in the episodes list.
+
+        Parameters:
+            - index (int): The zero-based index of the episode to retrieve.
+
+        Returns:
+            Episode: The Episode object at the specified index.
+        """
+        return self.episodes[index]
+
-    def get_length(self) -> int:
+    def length(self) -> int:
         """
         Get the number of episodes in the manager.
 
@@ -54,61 +71,23 @@ class EpisodeManager:
 
 class Season:
     def __init__(self, season_data: Dict[str, Union[int, str, None]]):
+        self.images = {}
+        self.season_data = season_data
+
         self.id: int = season_data.get('id')
+        self.scws_id: int = season_data.get('scws_id')
+        self.imdb_id: int = season_data.get('imdb_id')
         self.number: int = season_data.get('number')
         self.name: str = season_data.get('name')
+        self.slug: str = season_data.get('slug')
         self.plot: str = season_data.get('plot')
-        self.episodes_count: int = season_data.get('episodes_count')
-
-    def __str__(self):
-        return f"Season(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', episodes_count={self.episodes_count})"
-
-
-class SeasonManager:
-    def __init__(self):
-        self.seasons: List[Season] = []
-
-    def add_season(self, season_data: Dict[str, Union[int, str, None]]):
-        """
-        Add a new season to the manager.
-
-        Parameters:
-            season_data (Dict[str, Union[int, str, None]]): A dictionary containing data for the new season.
-        """
-        season = Season(season_data)
-        self.seasons.append(season)
-
-    def get(self, index: int) -> Season:
-        """
-        Get a season item from the list by index.
-
-        Parameters:
-            index (int): The index of the seasons item to retrieve.
-
-        Returns:
-            Season: The media item at the specified index.
-        """
-        return self.media_list[index]
-
-    def get_length(self) -> int:
-        """
-        Get the number of seasons in the manager.
-
-        Returns:
-            int: Number of seasons.
-        """
-        return len(self.seasons)
-
-    def clear(self) -> None:
-        """
-        This method clears the seasons list.
-
-        Parameters:
-            self: The object instance.
-        """
-        self.seasons.clear()
-
-    def __str__(self):
-        return f"SeasonManager(num_seasons={len(self.seasons)})"
+        self.type: str = season_data.get('type')
+        self.seasons_count: int = season_data.get('seasons_count')
+        self.episodes: EpisodeManager = EpisodeManager()
+
+    def collect_images(self, SITE_NAME, domain):
+        for dict_image in self.season_data.get('images'):
+            self.images[dict_image.get('type')] = f"https://cdn.{SITE_NAME}.{domain}/images/{dict_image.get('filename')}"
 
 
 class Stream:
@@ -120,8 +120,7 @@ class VideoSource:
             response.raise_for_status()
 
         except Exception as e:
-            print("\n")
-            console.print(Panel("[red bold]Coming soon", title="Notification", title_align="left", border_style="yellow"))
+            logging.error(f"Failed to get vixcloud contente with error: {e}")
             sys.exit(0)
 
         # Parse response with BeautifulSoup to get content
@@ -169,6 +168,56 @@ class VideoSource:
         # Construct the new URL with updated query parameters
         return urlunparse(parsed_url._replace(query=query_string))
 
+    def get_mp4(self, url_to_download: str, scws_id: str) -> list:
+        """
+        Generate download links for the specified resolutions from StreamingCommunity.
+
+        Args:
+            url_to_download (str): URL of the video page.
+            scws_id (str): SCWS ID of the title.
+
+        Returns:
+            list: A list of video download URLs.
+        """
+        headers = {
+            'referer': url_to_download,
+            'user-agent': get_headers(),
+        }
+
+        # API request to get video details
+        video_api_url = f'https://{self.base_name}.{self.domain}/api/video/{scws_id}'
+        response = httpx.get(video_api_url, headers=headers)
+
+        if response.status_code == 200:
+            response_json = response.json()
+
+            video_tracks = response_json.get('video_tracks', [])
+            track = video_tracks[-1]
+            console.print(f"[cyan]Available resolutions: [red]{[str(track['quality']) for track in video_tracks]}")
+
+            # Request download link generation for each track
+            download_response = httpx.post(
+                url=f'https://{self.base_name}.{self.domain}/api/download/generate_link?scws_id={track["video_id"]}&rendition={track["quality"]}',
+                headers={
+                    'referer': url_to_download,
+                    'user-agent': get_headers(),
+                    'x-xsrf-token': config_manager.get("SITE", self.base_name)['extra']['x-xsrf-token']
+                },
+                cookies={
+                    'streamingcommunity_session': config_manager.get("SITE", self.base_name)['extra']['streamingcommunity_session']
+                }
+            )
+
+            if download_response.status_code == 200:
+                return {'url': download_response.text, 'quality': track["quality"]}
+
+            else:
+                logging.error(f"Failed to generate link for resolution {track['quality']} (HTTP {download_response.status_code}).")
+
+        else:
+            logging.error(f"Error fetching video API URL (HTTP {response.status_code}).")
+            return []
+
 
 class VideoSourceAnime(VideoSource):
     def __init__(self, site_name: str):
@@ -221,4 +270,4 @@ class VideoSourceAnime(VideoSource):
 
         except Exception as e:
             logging.error(f"Error fetching embed URL: {e}")
             return None
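
For orientation, the new get_mp4 path authenticates the generate_link POST with an x-xsrf-token header and a streamingcommunity_session cookie taken from the site's 'extra' config block, so both must be present in config.json for it to work. A hedged usage sketch; the page URL and scws_id below are placeholders, and note that on success the method actually returns a dict with 'url' and 'quality' despite its -> list annotation:

    # Hypothetical call; construct VideoSource as done elsewhere in this commit.
    result = video_source.get_mp4('https://<site>.<domain>/watch/123', scws_id='456')
    if result:
        print(result['quality'], result['url'])
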
@@ -19,6 +19,7 @@ _useFor = "film_serie"
 _deprecate = False
 _priority = 1
 _engineDownload = "hls"
+from .costant import SITE_NAME
 
 
 def search(string_to_search: str = None, get_onylDatabase: bool = False):
@@ -27,7 +28,7 @@ def search(string_to_search: str = None, get_onylDatabase: bool = False):
     """
 
     if string_to_search is None:
-        string_to_search = msg.ask("\n[purple]Insert word to search in all site").strip()
+        string_to_search = msg.ask(f"\n[purple]Insert word to search in [red]{SITE_NAME}").strip()
 
     # Get site domain and version and get result of the search
     site_version, domain = get_version_and_domain()
@@ -52,13 +52,9 @@ def download_film(select_title: MediaItem):
     mp4_path = os.path.join(ROOT_PATH, SITE_NAME, MOVIE_FOLDER, select_title.slug)
 
     # Download the film using the m3u8 playlist, and output filename
-    r_proc = HLS_Downloader(
+    HLS_Downloader(
         m3u8_playlist=master_playlist,
         output_filename=os.path.join(mp4_path, title_name)
     ).start()
 
-    if r_proc != None:
-        console.print("[green]Result: ")
-        console.print(r_proc)
-
     return os.path.join(mp4_path, title_name)
@@ -40,15 +40,16 @@ def download_video(tv_name: str, index_season_selected: int, index_episode_selec
     """
 
     start_message()
+    tv_name = scrape_serie.season_manager.slug
 
     # Get info about episode
-    obj_episode = scrape_serie.obj_episode_manager.episodes[index_episode_selected - 1]
+    obj_episode = scrape_serie.episode_manager.get(index_episode_selected - 1)
     console.print(f"[yellow]Download: [red]{index_season_selected}:{index_episode_selected} {obj_episode.name}")
     print()
 
     # Define filename and path for the downloaded video
     mp4_name = f"{map_episode_title(tv_name, index_season_selected, index_episode_selected, obj_episode.name)}.mp4"
     mp4_path = os.path.join(ROOT_PATH, SITE_NAME, SERIES_FOLDER, tv_name, f"S{index_season_selected}")
 
     # Retrieve scws and if available master playlist
     video_source.get_iframe(obj_episode.id)
@@ -56,14 +57,10 @@ def download_video(tv_name: str, index_season_selected: int, index_episode_selec
     master_playlist = video_source.get_playlist()
 
     # Download the episode
-    r_proc = HLS_Downloader(
+    HLS_Downloader(
         m3u8_playlist=master_playlist,
         output_filename=os.path.join(mp4_path, mp4_name)
     ).start()
 
-    if r_proc != None:
-        console.print("[green]Result: ")
-        console.print(r_proc)
-
     return os.path.join(mp4_path, mp4_name)
 
@@ -78,13 +75,12 @@ def download_episode(tv_name: str, index_season_selected: int, scrape_serie: Scr
     """
 
     # Clean memory of all episodes and get the number of the season
-    scrape_serie.obj_episode_manager.clear()
-    season_number = scrape_serie.obj_season_manager.seasons[index_season_selected - 1].number
+    scrape_serie.episode_manager.clear()
 
     # Start message and collect information about episodes
     start_message()
-    scrape_serie.collect_title_season(season_number)
-    episodes_count = scrape_serie.obj_episode_manager.get_length()
+    scrape_serie.collect_info_season(index_season_selected)
+    episodes_count = scrape_serie.episode_manager.length()
 
     if download_all:
 
@@ -131,8 +127,8 @@ def download_series(select_season: MediaItem, version: str) -> None:
     video_source.setup(select_season.id)
 
     # Collect information about seasons
-    scrape_serie.collect_info_seasons()
-    seasons_count = scrape_serie.obj_season_manager.get_length()
+    scrape_serie.collect_info_title()
+    seasons_count = scrape_serie.season_manager.seasons_count
 
     # Prompt user for season selection and download episodes
     console.print(f"\n[green]Seasons found: [red]{seasons_count}")
@@ -182,7 +178,7 @@ def display_episodes_list(scrape_serie) -> str:
     table_show_manager.add_column(column_info)
 
     # Populate the table with episodes information
-    for i, media in enumerate(scrape_serie.obj_episode_manager.episodes):
+    for i, media in enumerate(scrape_serie.episode_manager.episodes):
         table_show_manager.add_tv_show({
             'Index': str(media.number),
             'Name': media.name,
@@ -3,6 +3,7 @@
 import sys
 import json
 import logging
+import secrets
 
 
 # External libraries
@@ -31,7 +32,7 @@ from .costant import SITE_NAME
 # Variable
 media_search_manager = MediaManager()
 table_show_manager = TVShowManager()
+max_timeout = config_manager.get_int("REQUESTS", "timeout")
 
 
 def get_version(text: str):
@@ -52,7 +53,7 @@ def get_version(text: str):
 
         # Extract version
         version = json.loads(soup.find("div", {"id": "app"}).get("data-page"))['version']
-        #console.print(f"[cyan]Get version [white]=> [red]{version} \n")
+        console.print(f"[cyan]Get version [white]=> [red]{version} \n")
 
         return version
 
@@ -74,7 +75,17 @@ def get_version_and_domain():
     domain_to_use, base_url = search_domain(SITE_NAME, f"https://{SITE_NAME}")
 
     # Extract version from the response
-    version = get_version(httpx.get(base_url, headers={'user-agent': get_headers()}).text)
+    try:
+        version = get_version(httpx.get(
+            url=base_url,
+            headers={
+                'user-agent': get_headers()
+            },
+            timeout=max_timeout
+        ).text)
+    except:
+        console.print("[green]Auto generate version ...")
+        version = secrets.token_hex(32 // 2)
 
     return version, domain_to_use
 
@@ -90,10 +101,6 @@ def title_search(title_search: str, domain: str) -> int:
     Returns:
         int: The number of titles found.
     """
-
-    max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-    # Send request to search for titles ( replace à to a and space to "+" )
     try:
         response = httpx.get(
             url=f"https://{SITE_NAME}.{domain}/api/search?q={title_search.replace(' ', '+')}",
@@ -112,6 +119,7 @@ def title_search(title_search: str, domain: str) -> int:
             'slug': dict_title.get('slug'),
             'name': dict_title.get('name'),
             'type': dict_title.get('type'),
+            'date': dict_title.get('last_air_date'),
             'score': dict_title.get('score')
         })
 
@@ -10,7 +10,7 @@ import httpx
 # Internal utilities
 from StreamingCommunity.Util.headers import get_headers
 from StreamingCommunity.Util._jsonConfig import config_manager
-from StreamingCommunity.Api.Player.Helper.Vixcloud.util import SeasonManager, EpisodeManager
+from StreamingCommunity.Api.Player.Helper.Vixcloud.util import Season, EpisodeManager
 
 
 # Variable
@@ -26,7 +26,7 @@ class ScrapeSerie:
             site_name (str): Name of the streaming site to scrape from
         """
         self.is_series = False
-        self.headers = {}
+        self.headers = {'user-agent': get_headers()}
         self.base_name = site_name
         self.domain = config_manager.get_dict('SITE', self.base_name)['domain']
 
@@ -46,23 +46,22 @@ class ScrapeSerie:
         if series_name is not None:
             self.is_series = True
             self.series_name = series_name
-            self.obj_season_manager: SeasonManager = SeasonManager()
-            self.obj_episode_manager: EpisodeManager = EpisodeManager()
-
-        # Create headers
-        self.headers = {
-            'user-agent': get_headers(),
-            'x-inertia': 'true',
-            'x-inertia-version': self.version,
-        }
+            self.season_manager = None
+            self.episode_manager: EpisodeManager = EpisodeManager()
 
-    def collect_info_seasons(self) -> None:
+    def collect_info_title(self) -> None:
         """
         Retrieve season information for a TV series from the streaming site.
 
         Raises:
             Exception: If there's an error fetching season information
         """
+        self.headers = {
+            'user-agent': get_headers(),
+            'x-inertia': 'true',
+            'x-inertia-version': self.version,
+        }
+
         try:
 
             response = httpx.get(
@@ -73,17 +72,22 @@ class ScrapeSerie:
             response.raise_for_status()
 
             # Extract seasons from JSON response
-            json_response = response.json().get('props', {}).get('title', {}).get('seasons', [])
+            json_response = response.json().get('props')
 
-            # Add each season to the season manager
-            for dict_season in json_response:
-                self.obj_season_manager.add_season(dict_season)
+            # Collect info about season
+            self.season_manager = Season(json_response.get('title'))
+            self.season_manager.collect_images(self.base_name, self.domain)
+
+            # Collect first episode info
+            for i, ep in enumerate(json_response.get('loadedSeason').get('episodes')):
+                self.season_manager.episodes.add(ep)
+                self.season_manager.episodes.get(i).collect_image(self.base_name, self.domain)
 
         except Exception as e:
             logging.error(f"Error collecting season info: {e}")
             raise
 
-    def collect_title_season(self, number_season: int) -> None:
+    def collect_info_season(self, number_season: int) -> None:
        """
         Retrieve episode information for a specific season.
 
@@ -93,6 +97,12 @@ class ScrapeSerie:
         Raises:
             Exception: If there's an error fetching episode information
         """
+        self.headers = {
+            'user-agent': get_headers(),
+            'x-inertia': 'true',
+            'x-inertia-version': self.version,
+        }
+
         try:
             response = httpx.get(
                 url=f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
@@ -102,11 +112,11 @@ class ScrapeSerie:
             response.raise_for_status()
 
             # Extract episodes from JSON response
-            json_response = response.json().get('props', {}).get('loadedSeason', {}).get('episodes', [])
+            json_response = response.json().get('props').get('loadedSeason').get('episodes')
 
             # Add each episode to the episode manager
             for dict_episode in json_response:
-                self.obj_episode_manager.add_episode(dict_episode)
+                self.episode_manager.add(dict_episode)
 
         except Exception as e:
             logging.error(f"Error collecting title season info: {e}")
@@ -49,7 +49,16 @@ def get_final_redirect_url(initial_url, max_timeout):
 
     # Create a client with redirects enabled
     try:
-        with httpx.Client(follow_redirects=True, timeout=max_timeout, headers={'user-agent': get_headers()}) as client:
+        with httpx.Client(
+            headers={
+                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+                'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
+                'User-Agent': get_headers()
+            },
+            follow_redirects=True,
+            timeout=max_timeout
+
+        ) as client:
             response = client.get(initial_url)
             response.raise_for_status()
 
@@ -59,7 +68,7 @@ def get_final_redirect_url(initial_url, max_timeout):
         return final_url
 
     except Exception as e:
-        console.print(f"[cyan]Test url[white]: [red]{initial_url}, [cyan]error[white]: [red]{e}")
+        console.print(f"\n[cyan]Test url[white]: [red]{initial_url}, [cyan]error[white]: [red]{e}")
         return None
 
 def search_domain(site_name: str, base_url: str):
@@ -69,7 +78,6 @@ def search_domain(site_name: str, base_url: str):
     Parameters:
         - site_name (str): The name of the site to search the domain for.
         - base_url (str): The base URL to construct complete URLs.
-        - follow_redirects (bool): To follow redirect url or not.
 
     Returns:
         tuple: The found domain and the complete URL.
@@ -80,47 +88,67 @@ def search_domain(site_name: str, base_url: str):
     domain = str(config_manager.get_dict("SITE", site_name)['domain'])
 
     try:
 
         # Test the current domain
-        response_follow = httpx.get(f"{base_url}.{domain}", headers={'user-agent': get_headers()}, timeout=max_timeout, follow_redirects=True)
-        response_follow.raise_for_status()
+        with httpx.Client(
+            headers={
+                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+                'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
+                'User-Agent': get_headers()
+            },
+            follow_redirects=True,
+            timeout=max_timeout
+
+        ) as client:
+            response_follow = client.get(f"{base_url}.{domain}")
+            response_follow.raise_for_status()
 
     except Exception as e:
 
         query = base_url.split("/")[-1]
-        first_url = google_search(query)
-        console.print(f"[green]First url from google seach[white]: [red]{first_url}")
 
-        if first_url:
-            final_url = get_final_redirect_url(first_url, max_timeout)
+        # Perform a Google search with multiple results
+        search_results = list(search(query, num_results=5))
+        #console.print(f"[green]Google search results[white]: {search_results}")
 
-            if final_url != None:
-                console.print(f"\n[bold yellow]Suggestion:[/bold yellow] [white](Experimental)\n"
-                    f"[cyan]New final URL[white]: [green]{final_url}")
-
-                def extract_domain(url):
-                    parsed_url = urlparse(url)
-                    domain = parsed_url.netloc
-                    return domain.split(".")[-1]
-
-                new_domain_extract = extract_domain(str(final_url))
-
-                if msg.ask(f"[red]Do you want to auto update config.json - '[green]{site_name}[red]' with domain: [green]{new_domain_extract}", choices=["y", "n"], default="y").lower() == "y":
-
-                    # Update domain in config.json
-                    config_manager.config['SITE'][site_name]['domain'] = new_domain_extract
-                    config_manager.write_config()
-
-                    # Return config domain
-                    #console.print(f"[cyan]Return domain: [red]{new_domain_extract} \n")
-                    return new_domain_extract, f"{base_url}.{new_domain_extract}"
-
-            else:
-                console.print("[bold red]\nManually change the domain in the JSON file.[/bold red]")
-                raise
-
-        else:
-            console.print("[bold red]No valid URL to follow redirects.[/bold red]")
+        # Iterate through search results
+        for first_url in search_results:
+            console.print(f"[green]Checking url[white]: [red]{first_url}")
+
+            # Check if the base URL matches the Google search result
+            parsed_first_url = urlparse(first_url)
+
+            # Compare base url from google search with base url from config.json
+            if parsed_first_url.netloc.split(".")[0] == base_url:
+                console.print(f"[red]URL does not match base URL. Skipping.[/red]")
+                continue
+
+            try:
+                final_url = get_final_redirect_url(first_url, max_timeout)
+
+                if final_url is not None:
+
+                    def extract_domain(url):
+                        parsed_url = urlparse(url)
+                        domain = parsed_url.netloc
+                        return domain.split(".")[-1]
+
+                    new_domain_extract = extract_domain(str(final_url))
+
+                    if msg.ask(f"[cyan]\nDo you want to auto site[white]: [red]{site_name}[cyan] with domain[white]: [red]{new_domain_extract}", choices=["y", "n"], default="y").lower() == "y":
+
+                        # Update domain in config.json
+                        config_manager.config['SITE'][site_name]['domain'] = new_domain_extract
+                        config_manager.write_config()
+
+                        # Return config domain
+                        return new_domain_extract, f"{base_url}.{new_domain_extract}"
+
+            except Exception as redirect_error:
+                console.print(f"[red]Error following redirect for {first_url}: {redirect_error}")
+                continue
+
+        # If no matching URL is found
+        console.print("[bold red]No valid URL found matching the base URL.[/bold red]")
+        raise Exception("No matching domain found")
 
     # Ensure the URL is in string format before parsing
     parsed_url = urlparse(str(response_follow.url))
@@ -128,10 +156,9 @@ def search_domain(site_name: str, base_url: str):
     tld = parse_domain.split('.')[-1]
 
     if tld is not None:
-
         # Update domain in config.json
         config_manager.config['SITE'][site_name]['domain'] = tld
         config_manager.write_config()
 
         # Return config domain
         return tld, f"{base_url}.{tld}"
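
One dependency note on the hunk above: the rewritten fallback calls search(query, num_results=5), which matches the top-level search() function of the googlesearch-python package, so the import presumably changed from the old google_search helper to something like the sketch below (the import itself is not shown in this diff, so treat it as an assumption):

    # Assumed import for the new fallback; not visible in this commit.
    from googlesearch import search

    for url in search("streamingcommunity", num_results=5):
        print(url)
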
@@ -117,16 +117,29 @@ def validate_selection(list_season_select: List[int], seasons_count: int) -> Lis
     Returns:
         - List[int]: Adjusted list of valid season numbers.
     """
+    while True:
+        try:
 
-    # Remove any seasons greater than the available seasons
-    valid_seasons = [season for season in list_season_select if 1 <= season <= seasons_count]
+            # Remove any seasons greater than the available seasons
+            valid_seasons = [season for season in list_season_select if 1 <= season <= seasons_count]
 
-    # If the list is empty, the input was completely invalid
-    if not valid_seasons:
-        print()
-        raise ValueError(f"Invalid selection: The selected seasons are outside the available range (1-{seasons_count}).")
+            # If the list is empty, the input was completely invalid
+            if not valid_seasons:
+                logging.error(f"Invalid selection: The selected seasons are outside the available range (1-{seasons_count}). Please try again.")
 
-    return valid_seasons
+                # Re-prompt for valid input
+                input_seasons = input(f"Enter valid season numbers (1-{seasons_count}): ")
+                list_season_select = list(map(int, input_seasons.split(',')))
+                continue  # Re-prompt the user if the selection is invalid
+
+            return valid_seasons  # Return the valid seasons if the input is correct
+
+        except ValueError:
+            logging.error("Error: Please enter valid integers separated by commas.")
+
+            # Prompt the user for valid input again
+            input_seasons = input(f"Enter valid season numbers (1-{seasons_count}): ")
+            list_season_select = list(map(int, input_seasons.split(',')))
 
 
 # --> for episode
@@ -141,13 +154,26 @@ def validate_episode_selection(list_episode_select: List[int], episodes_count: i
     Returns:
         - List[int]: Adjusted list of valid episode numbers.
     """
+    while True:
+        try:
 
-    # Remove any episodes greater than the available episodes
-    valid_episodes = [episode for episode in list_episode_select if 1 <= episode <= episodes_count]
+            # Remove any episodes greater than the available episodes
+            valid_episodes = [episode for episode in list_episode_select if 1 <= episode <= episodes_count]
 
-    # If the list is empty, the input was completely invalid
-    if not valid_episodes:
-        print()
-        raise ValueError(f"Invalid selection: The selected episodes are outside the available range (1-{episodes_count}).")
+            # If the list is empty, the input was completely invalid
+            if not valid_episodes:
+                logging.error(f"Invalid selection: The selected episodes are outside the available range (1-{episodes_count}). Please try again.")
 
-    return valid_episodes
+                # Re-prompt for valid input
+                input_episodes = input(f"Enter valid episode numbers (1-{episodes_count}): ")
+                list_episode_select = list(map(int, input_episodes.split(',')))
+                continue  # Re-prompt the user if the selection is invalid
+
+            return valid_episodes
+
+        except ValueError:
+            logging.error("Error: Please enter valid integers separated by commas.")
+
+            # Prompt the user for valid input again
+            input_episodes = input(f"Enter valid episode numbers (1-{episodes_count}): ")
+            list_episode_select = list(map(int, input_episodes.split(',')))
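
The net behavioral change in both validators: fully invalid input now re-prompts on stdin instead of raising ValueError, so callers can drop their own retry handling. A small sketch of the new contract (inputs are hypothetical):

    # With 4 seasons available:
    validate_selection([1, 3], seasons_count=4)    # returns [1, 3] immediately
    validate_selection([9, 12], seasons_count=4)   # loops, asking for new numbers until valid
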
@@ -229,11 +229,6 @@ class M3U8_Segments:
             self.download_interrupted = True
             self.stop_event.set()
 
-        if threading.current_thread() is threading.main_thread():
-            signal.signal(signal.SIGINT, interrupt_handler)
-        else:
-            print("Signal handler must be set in the main thread")
-
     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
         """
         Downloads a TS segment and adds it to the segment queue with retry logic.
@@ -548,7 +543,7 @@ class M3U8_Segments:
         file_size = os.path.getsize(self.tmp_file_path)
         if file_size == 0:
             raise Exception("Output file is empty")
 
         # Get expected time
         ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
         ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
StreamingCommunity/Lib/Driver/driver_1.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+# 29.06.24
+
+import tempfile
+import logging
+
+
+# External library
+from bs4 import BeautifulSoup
+from seleniumbase import Driver
+
+
+# Internal utilities
+from StreamingCommunity.Util._jsonConfig import config_manager
+
+
+# Config
+USE_HEADLESS = config_manager.get_bool("BROWSER", "headless")
+
+
+class WebAutomation:
+    """
+    A class for automating web interactions using SeleniumBase and BeautifulSoup.
+    """
+
+    def __init__(self):
+        """
+        Initializes the WebAutomation instance with SeleniumBase Driver.
+
+        Parameters:
+            headless (bool, optional): Whether to run the browser in headless mode. Default is True.
+        """
+        logging.getLogger('seleniumbase').setLevel(logging.ERROR)
+
+        self.driver = Driver(
+            uc=True,
+            uc_cdp_events=True,
+            headless=USE_HEADLESS,
+            user_data_dir = tempfile.mkdtemp(),
+            chromium_arg="--disable-search-engine-choice-screen"
+        )
+
+    def quit(self):
+        """
+        Quits the WebDriver instance.
+        """
+        self.driver.quit()
+
+    def get_page(self, url):
+        """
+        Navigates the browser to the specified URL.
+
+        Parameters:
+            url (str): The URL to navigate to.
+        """
+        self.driver.get(url)
+
+    def retrieve_soup(self):
+        """
+        Retrieves the BeautifulSoup object for the current page's HTML content.
+
+        Returns:
+            BeautifulSoup object: Parsed HTML content of the current page.
+        """
+        html_content = self.driver.page_source
+        soup = BeautifulSoup(html_content, 'html.parser')
+        return soup
+
+    def get_content(self):
+        """
+        Returns the HTML content of the current page.
+
+        Returns:
+            str: The HTML content of the current page.
+        """
+        return self.driver.page_source
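
A quick usage sketch for the new wrapper (the URL is a placeholder; it requires the seleniumbase package and a BROWSER.headless entry in config.json):

    web = WebAutomation()
    web.get_page('https://example.com')   # navigate
    soup = web.retrieve_soup()            # parse the current page with BeautifulSoup
    print(soup.title)
    web.quit()
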
@@ -55,7 +55,6 @@ def get_video_duration(file_path: str) -> float:
     Returns:
         (float): The duration of the video in seconds if successful, None if there's an error.
     """
-
     try:
         ffprobe_cmd = [FFPROB_PATH, '-v', 'error', '-show_format', '-print_format', 'json', file_path]
         logging.info(f"FFmpeg command: {ffprobe_cmd}")
@@ -1,5 +1,12 @@
 # 03.03.24
 
+import os
+import sys
+import logging
+import importlib
+
+
+# External library
 from rich.console import Console
 from rich.table import Table
 from rich.prompt import Prompt
@@ -9,15 +16,13 @@ from typing import Dict, List, Any
 
 # Internal utilities
 from .message import start_message
+from .call_stack import get_call_stack
 
 
 class TVShowManager:
     def __init__(self):
         """
         Initialize TVShowManager with provided column information.
-
-        Parameters:
-            - column_info (Dict[str, Dict[str, str]]): Dictionary containing column names, their colors, and justification.
         """
         self.console = Console()
         self.tv_shows: List[Dict[str, Any]] = []  # List to store TV show data as dictionaries
@@ -80,7 +85,6 @@ class TVShowManager:
 
         self.console.print(table)  # Use self.console.print instead of print
 
-
     def run(self, force_int_input: bool = False, max_int_input: int = 0) -> str:
         """
         Run the TV show manager application.
@@ -101,9 +105,16 @@ class TVShowManager:
             # Display table
             self.display_data(self.tv_shows[self.slice_start:self.slice_end])
 
+            # Find research function from call stack
+            research_func = None
+            for reverse_fun in get_call_stack():
+                if reverse_fun['function'] == 'search' and reverse_fun['script'] == '__init__.py':
+                    research_func = reverse_fun
+                    logging.info(f"Found research_func: {research_func}")
+
             # Handling user input for loading more items or quitting
             if self.slice_end < total_items:
-                self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")
+                self.console.print(f"\n\n[yellow][INFO] [green]Press [red]Enter [green]for next page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
 
                 if not force_int_input:
                     key = Prompt.ask(
@@ -113,7 +124,7 @@ class TVShowManager:
 
                 else:
                     choices = [str(i) for i in range(0, max_int_input)]
-                    choices.extend(["q", ""])
+                    choices.extend(["q", "", "back"])
 
                     key = Prompt.ask("[cyan]Insert media [red]index", choices=choices, show_choices=False)
                 last_command = key
@@ -127,22 +138,62 @@ class TVShowManager:
                 if self.slice_end > total_items:
                     self.slice_end = total_items
 
+                elif key.lower() == "back" and research_func:
+                    try:
+                        # Find the project root directory
+                        current_path = research_func['folder']
+                        while not os.path.exists(os.path.join(current_path, 'StreamingCommunity')):
+                            current_path = os.path.dirname(current_path)
+
+                        # Add project root to Python path
+                        project_root = current_path
+                        #print(f"[DEBUG] Project Root: {project_root}")
+
+                        if project_root not in sys.path:
+                            sys.path.insert(0, project_root)
+
+                        # Import using full absolute import
+                        module_path = 'StreamingCommunity.Api.Site.streamingcommunity'
+                        #print(f"[DEBUG] Importing module: {module_path}")
+
+                        # Import the module
+                        module = importlib.import_module(module_path)
+
+                        # Get the search function
+                        search_func = getattr(module, 'media_search_manager')
+
+                        # Ask for search string
+                        string_to_search = Prompt.ask(f"\n[purple]Insert word to search in [red]{research_func['folder_base']}").strip()
+
+                        # Call the search function with the search string
+                        search_func(string_to_search)
+
+                    except Exception as e:
+                        self.console.print(f"[red]Error during search: {e}")
+
+                        # Print detailed traceback
+                        import traceback
+                        traceback.print_exc()
+
+                        # Optionally remove the path if you want to clean up
+                        if project_root in sys.path:
+                            sys.path.remove(project_root)
+
                 else:
                     break
 
             else:
                 # Last slice, ensure all remaining items are shown
-                self.console.print(f"\n\n[yellow][INFO] [red]You've reached the end. [green]Press [red]Enter [green]for next page, or [red]'q' [green]to quit.")
+                self.console.print(f"\n\n[yellow][INFO] [green]You've reached the end. [red]Enter [green]for first page, [red]'q' [green]to quit, or [red]'back' [green]to search.")
                 if not force_int_input:
                     key = Prompt.ask(
                         "\n[cyan]Insert media index [yellow](e.g., 1), [red]* [cyan]to download all media, "
                         "[yellow](e.g., 1-2) [cyan]for a range of media, or [yellow](e.g., 3-*) [cyan]to download from a specific index to the end"
                     )
 
                 else:
                     choices = [str(i) for i in range(0, max_int_input)]
-                    choices.extend(["q", ""])
+                    choices.extend(["q", "", "back"])
 
                     key = Prompt.ask("[cyan]Insert media [red]index", choices=choices, show_choices=False)
                 last_command = key
@@ -154,10 +205,51 @@ class TVShowManager:
                 self.slice_start = 0
                 self.slice_end = self.step
 
+                elif key.lower() == "back" and research_func:
+                    try:
+                        # Find the project root directory
+                        current_path = research_func['folder']
+                        while not os.path.exists(os.path.join(current_path, 'StreamingCommunity')):
+                            current_path = os.path.dirname(current_path)
+
+                        # Add project root to Python path
+                        project_root = current_path
+                        #print(f"[DEBUG] Project Root: {project_root}")
+
+                        if project_root not in sys.path:
+                            sys.path.insert(0, project_root)
+
+                        # Import using full absolute import
+                        module_path = 'StreamingCommunity.Api.Site.streamingcommunity'
+                        #print(f"[DEBUG] Importing module: {module_path}")
+
+                        # Import the module
+                        module = importlib.import_module(module_path)
+
+                        # Get the search function
+                        search_func = getattr(module, 'media_search_manager')
+
+                        # Ask for search string
+                        string_to_search = Prompt.ask(f"\n[purple]Insert word to search in [red]{research_func['folder_base']}").strip()
+
+                        # Call the search function with the search string
+                        search_func(string_to_search)
+
+                    except Exception as e:
+                        self.console.print(f"[red]Error during search: {e}")
+
+                        # Print detailed traceback
+                        import traceback
+                        traceback.print_exc()
+
+                        # Optionally remove the path if you want to clean up
+                        if project_root in sys.path:
+                            sys.path.remove(project_root)
+
                 else:
                     break
 
         return last_command
 
     def clear(self):
         self.tv_shows = []
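
Implementation note on the 'back' flow above: it relies on get_call_stack() from the new .call_stack helper, which this commit does not show. From the way run() consumes it, each stack record presumably carries at least the fields below (names inferred from usage, so treat the shape as an assumption):

    # Inferred record shape; the real helper lives in .call_stack and is not in this diff.
    frame = {
        'function': 'search',                 # function name at that frame
        'script': '__init__.py',              # file name containing the frame
        'folder': '/path/to/Api/Site/streamingcommunity',   # directory of that file (placeholder path)
        'folder_base': 'streamingcommunity',  # last path component, shown in the prompt
    }
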
@@ -3,8 +3,7 @@ import axios from 'axios';
 import { Container, Button, Form, InputGroup } from 'react-bootstrap';
 
 import SearchBar from './SearchBar.js';
-
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL } from './ApiUrl.js';
 
 const Dashboard = () => {
   const [items, setItems] = useState([]);
@@ -15,7 +14,7 @@ const Dashboard = () => {
 
   const fetchItems = async (filter = '') => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/api/items?filter=${filter}`);
+      const response = await axios.get(`${API_URL}/items?filter=${filter}`);
       setItems(response.data);
     } catch (error) {
       console.error("Error fetching items:", error);
@@ -4,7 +4,7 @@ import { Container, Row, Col, Card, Button, Badge, Modal } from 'react-bootstrap
 import { FaTrash, FaPlay } from 'react-icons/fa';
 import { Link } from 'react-router-dom';
 
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { SERVER_PATH_URL, SERVER_DELETE_URL, API_URL } from './ApiUrl';
 
 const Downloads = () => {
   const [downloads, setDownloads] = useState([]);
@@ -15,7 +15,7 @@ const Downloads = () => {
   // Fetch all downloads
   const fetchDownloads = async () => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/downloads`);
+      const response = await axios.get(`${SERVER_PATH_URL}/get`);
       setDownloads(response.data);
       setLoading(false);
     } catch (error) {
@@ -27,7 +27,7 @@ const Downloads = () => {
   // Delete a TV episode
   const handleDeleteEpisode = async (id, season, episode) => {
     try {
-      await axios.delete(`${API_BASE_URL}/deleteEpisode`, {
+      await axios.delete(`${SERVER_DELETE_URL}/episode`, {
         params: { id, season, episode }
       });
       fetchDownloads(); // Refresh the list
@@ -39,7 +39,7 @@ const Downloads = () => {
   // Delete a movie
   const handleDeleteMovie = async (id) => {
     try {
-      await axios.delete(`${API_BASE_URL}/deleteMovie`, {
+      await axios.delete(`${SERVER_DELETE_URL}/movie`, {
         params: { id }
       });
       fetchDownloads(); // Refresh the list
@@ -50,13 +50,16 @@ const Downloads = () => {
 
   // Watch video
   const handleWatchVideo = (videoPath) => {
+    console.log("Video path received:", videoPath); // Check the value of videoPath
     setCurrentVideo(videoPath);
     setShowPlayer(true);
   };
 
 
   // Initial fetch of downloads
   useEffect(() => {
     fetchDownloads();
+    console.log("Downloads fetched:", downloads);
   }, []);
 
   if (loading) {
@@ -107,7 +110,7 @@ const Downloads = () => {
                     <Button
                       variant="primary"
                       size="sm"
-                      onClick={() => handleWatchVideo(movie.path)}
+                      onClick={() => handleWatchVideo(movie.path || movie.videoUrl)} // Use the correct field
                     >
                       <FaPlay className="me-2" /> Watch
                     </Button>
@@ -180,12 +183,12 @@ const Downloads = () => {
      {/* Modal Video Player */}
      <Modal show={showPlayer} onHide={() => setShowPlayer(false)} size="lg" centered>
        <Modal.Body>
          <video
-            src={`http://127.0.0.1:1234/downloaded/${currentVideo}`}
+            src={`${API_URL}/downloaded/${currentVideo}`}
            controls
            autoPlay
            style={{ width: '100%' }}
          />
        </Modal.Body>
      </Modal>
    </Container>
@@ -1,5 +1,5 @@
 import React, { useState } from 'react';
-import PropTypes from 'prop-types'; // Add this import
+import PropTypes from 'prop-types';
 import { useNavigate } from 'react-router-dom';
 import { Form, InputGroup, Button } from 'react-bootstrap';
 import { FaSearch } from 'react-icons/fa';
@@ -38,11 +38,8 @@ const SearchBar = ({ onSearch }) => {
   );
 };
 
-// Add PropTypes validation
 SearchBar.propTypes = {
-  onSearch: PropTypes.func // If onSearch is optional
-  // or
-  // onSearch: PropTypes.func.isRequired // If onSearch is required
+  onSearch: PropTypes.func
 };
 
 export default SearchBar;
@@ -4,8 +4,7 @@ import axios from 'axios';
 import { Container, Row, Col, Card, Spinner } from 'react-bootstrap';
 
 import SearchBar from './SearchBar.js';
-
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL } from './ApiUrl.js';
 
 const SearchResults = () => {
   const [results, setResults] = useState([]);
@@ -20,7 +19,7 @@ const SearchResults = () => {
   const fetchSearchResults = async () => {
     try {
       setLoading(true);
-      const response = await axios.get(`${API_BASE_URL}/api/search`, {
+      const response = await axios.get(`${API_URL}/search`, {
         params: { q: query }
       });
       setResults(response.data);
@@ -6,7 +6,7 @@ import { FaDownload, FaPlay, FaPlus, FaTrash } from 'react-icons/fa';
 
 import SearchBar from './SearchBar.js';
 
-const API_BASE_URL = "http://127.0.0.1:1234";
+import { API_URL, SERVER_WATCHLIST_URL, SERVER_PATH_URL } from './ApiUrl.js';
 
 const TitleDetail = () => {
   const [titleDetails, setTitleDetails] = useState(null);
@@ -27,7 +27,7 @@ const TitleDetail = () => {
       const titleUrl = location.state?.url || location.pathname.split('/title/')[1];
 
       // Fetch title information
-      const response = await axios.get(`${API_BASE_URL}/api/getInfo`, {
+      const response = await axios.get(`${API_URL}/getInfo`, {
         params: { url: titleUrl }
       });
 
@@ -59,7 +59,7 @@ const TitleDetail = () => {
   const checkDownloadStatus = async (titleData) => {
     try {
       if (titleData.type === 'movie') {
-        const response = await axios.get(`${API_BASE_URL}/downloads`);
+        const response = await axios.get(`${SERVER_PATH_URL}/get`);
         const downloadedMovie = response.data.find(
           download => download.type === 'movie' && download.slug === titleData.slug
         );
@@ -70,7 +70,7 @@ const TitleDetail = () => {
         }
       });
       } else if (titleData.type === 'tv') {
-        const response = await axios.get(`${API_BASE_URL}/downloads`);
+        const response = await axios.get(`${SERVER_PATH_URL}/get`);
         const downloadedEpisodes = response.data.filter(
           download => download.type === 'tv' && download.slug === titleData.slug
         );
@@ -92,7 +92,7 @@ const TitleDetail = () => {
   // Check watchlist status
   const checkWatchlistStatus = async (slug) => {
     try {
-      const response = await axios.get(`${API_BASE_URL}/api/getWatchlist`);
+      const response = await axios.get(`${SERVER_WATCHLIST_URL}/get`);
       const inWatchlist = response.data.some(item => item.name === slug);
       setIsInWatchlist(inWatchlist);
     } catch (error) {
@@ -104,7 +104,7 @@ const TitleDetail = () => {
     if (titleDetails.type === 'tv') {
       try {
         setLoading(true);
-        const seasonResponse = await axios.get(`${API_BASE_URL}/api/getInfoSeason`, {
+        const seasonResponse = await axios.get(`${API_URL}/getInfoSeason`, {
          params: {
            url: location.state?.url,
            n: seasonNumber
@@ -123,7 +123,7 @@ const TitleDetail = () => {
 
    const handleDownloadFilm = async () => {
      try {
-        const response = await axios.get(`${API_BASE_URL}/downloadFilm`, {
|
const response = await axios.get(`${API_URL}/download/film`, {
|
||||||
params: {
|
params: {
|
||||||
id: titleDetails.id,
|
id: titleDetails.id,
|
||||||
slug: titleDetails.slug
|
slug: titleDetails.slug
|
||||||
@ -144,12 +144,14 @@ const TitleDetail = () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleDownloadEpisode = async (seasonNumber, episodeNumber) => {
|
const handleDownloadEpisode = async (seasonNumber, episodeNumber, titleID, titleSlug) => {
|
||||||
try {
|
try {
|
||||||
const response = await axios.get(`${API_BASE_URL}/downloadEpisode`, {
|
const response = await axios.get(`${API_URL}/download/episode`, {
|
||||||
params: {
|
params: {
|
||||||
n_s: seasonNumber,
|
n_s: seasonNumber,
|
||||||
n_ep: episodeNumber
|
n_ep: episodeNumber,
|
||||||
|
titleID: titleID,
|
||||||
|
slug: titleSlug
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
const videoPath = response.data.path;
|
const videoPath = response.data.path;
|
||||||
@ -176,7 +178,7 @@ const TitleDetail = () => {
|
|||||||
try {
|
try {
|
||||||
let path;
|
let path;
|
||||||
if (titleDetails.type === 'movie') {
|
if (titleDetails.type === 'movie') {
|
||||||
const response = await axios.get(`${API_BASE_URL}/moviePath`, {
|
const response = await axios.get(`${SERVER_PATH_URL}/movie`, {
|
||||||
params: { id: titleDetails.id }
|
params: { id: titleDetails.id }
|
||||||
});
|
});
|
||||||
path = response.data.path;
|
path = response.data.path;
|
||||||
@ -198,21 +200,21 @@ const TitleDetail = () => {
|
|||||||
|
|
||||||
const handleAddToWatchlist = async () => {
|
const handleAddToWatchlist = async () => {
|
||||||
try {
|
try {
|
||||||
await axios.post(`${API_BASE_URL}/api/addWatchlist`, {
|
await axios.post(`${SERVER_WATCHLIST_URL}/add`, {
|
||||||
name: titleDetails.slug,
|
name: titleDetails.slug,
|
||||||
url: location.state?.url || location.pathname.split('/title/')[1],
|
url: location.state?.url || location.pathname.split('/title/')[1],
|
||||||
season: titleDetails.season_count
|
season: titleDetails.season_count // Changed 'season_count' to 'season'
|
||||||
});
|
});
|
||||||
setIsInWatchlist(true);
|
setIsInWatchlist(true);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error adding to watchlist:", error);
|
console.error("Error adding to watchlist:", error);
|
||||||
alert("Error adding to watchlist. Please try again.");
|
alert("Error adding to watchlist. Please try again.");
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleRemoveFromWatchlist = async () => {
|
const handleRemoveFromWatchlist = async () => {
|
||||||
try {
|
try {
|
||||||
await axios.post(`${API_BASE_URL}/api/removeWatchlist`, {
|
await axios.post(`${SERVER_WATCHLIST_URL}/remove`, {
|
||||||
name: titleDetails.slug
|
name: titleDetails.slug
|
||||||
});
|
});
|
||||||
setIsInWatchlist(false);
|
setIsInWatchlist(false);
|
||||||
@ -375,7 +377,7 @@ const TitleDetail = () => {
|
|||||||
) : (
|
) : (
|
||||||
<Button
|
<Button
|
||||||
variant="primary"
|
variant="primary"
|
||||||
onClick={() => handleDownloadEpisode(selectedSeason, episode.number)}
|
onClick={() => handleDownloadEpisode(selectedSeason, episode.number, titleDetails.id, titleDetails.slug)}
|
||||||
>
|
>
|
||||||
<FaDownload className="me-2" /> Download
|
<FaDownload className="me-2" /> Download
|
||||||
</Button>
|
</Button>
|
||||||
@ -393,7 +395,7 @@ const TitleDetail = () => {
|
|||||||
<Modal show={showPlayer} onHide={() => setShowPlayer(false)} size="lg" centered>
|
<Modal show={showPlayer} onHide={() => setShowPlayer(false)} size="lg" centered>
|
||||||
<Modal.Body>
|
<Modal.Body>
|
||||||
<video
|
<video
|
||||||
src={`http://127.0.0.1:1234/downloaded/${currentVideo}`}
|
src={`${API_URL}/downloaded/${currentVideo}`}
|
||||||
controls
|
controls
|
||||||
autoPlay
|
autoPlay
|
||||||
style={{ width: '100%' }}
|
style={{ width: '100%' }}
|
||||||
|
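A note on the three constants imported from ApiUrl.js: that module is not part of this diff. Matching the calls above against the new FastAPI routes in server.py below suggests prefixes like the ones sketched here (a hypothetical reconstruction, written in Python only to mirror the other examples in this commit — the real module is frontend JS):

# Hypothetical prefix mapping implied by the calls above; the values are
# inferred, not copied from the actual ApiUrl.js.
BASE = "http://127.0.0.1:1234"
API_URL = BASE + "/api"                            # `${API_URL}/search` -> GET /api/search
SERVER_WATCHLIST_URL = BASE + "/server/watchlist"  # `${SERVER_WATCHLIST_URL}/get` -> GET /server/watchlist/get
SERVER_PATH_URL = BASE + "/server/path"            # `${SERVER_PATH_URL}/movie` -> GET /server/path/movie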
@ -4,7 +4,7 @@ import { Container, Row, Col, Card, Button, Badge, Alert } from 'react-bootstrap
import { Link } from 'react-router-dom';
import { FaTrash } from 'react-icons/fa';

const API_BASE_URL = "http://127.0.0.1:1234";
import { SERVER_WATCHLIST_URL } from './ApiUrl';

const Watchlist = () => {
const [watchlistItems, setWatchlistItems] = useState([]);

@ -15,7 +15,7 @@ const Watchlist = () => {
// Fetch the watchlist data
const fetchWatchlistData = async () => {
try {
const watchlistResponse = await axios.get(`${API_BASE_URL}/api/getWatchlist`);
const watchlistResponse = await axios.get(`${SERVER_WATCHLIST_URL}/get`);
setWatchlistItems(watchlistResponse.data);
setLoading(false);
} catch (error) {

@ -27,7 +27,7 @@ const Watchlist = () => {
// Check whether new seasons are available (triggered by the button)
const checkNewSeasons = async () => {
try {
const newSeasonsResponse = await axios.get(`${API_BASE_URL}/api/checkWatchlist`);
const newSeasonsResponse = await axios.get(`${SERVER_WATCHLIST_URL}/check`);

if (Array.isArray(newSeasonsResponse.data)) {
setNewSeasons(newSeasonsResponse.data);

@ -58,7 +58,7 @@ const Watchlist = () => {
// Send a POST request for every title that has new seasons
console.log(`Updated watchlist for ${season.name} with new season ${season.nNewSeason}, url: ${season.title_url}`);

await axios.post(`${API_BASE_URL}/api/updateTitleWatchlist`, {
await axios.post(`${SERVER_WATCHLIST_URL}/update`, {
url: season.title_url,
season: season.season
});

@ -72,15 +72,17 @@ const Watchlist = () => {
// Remove an item from the watchlist
const handleRemoveFromWatchlist = async (serieName) => {
try {
await axios.post(`${API_BASE_URL}/api/removeWatchlist`, { name: serieName });
await axios.post(`${SERVER_WATCHLIST_URL}/remove`, {
name: serieName
});

// Update the local state to drop the removed item
setWatchlistItems((prev) => prev.filter((item) => item.name !== serieName));
} catch (error) {
console.error("Error removing from watchlist:", error);
}
};

// Initial watchlist load
useEffect(() => {
fetchWatchlistData();
config.json
@ -2,9 +2,9 @@
"DEFAULT": {
"debug": false,
"log_file": "app.log",
"log_to_file": true,
"log_to_file": false,
"show_message": true,
"show_message": false,
"clean_console": true,
"clean_console": false,
"root_path": "Video",
"movie_folder_name": "Movie",
"serie_folder_name": "TV",

@ -24,8 +24,8 @@
"download_video": true,
"download_audio": true,
"merge_audio": true,
"default_video_workser": 12,
"default_video_workser": 8,
"default_audio_workser": 12,
"default_audio_workser": 8,
"specific_list_audio": [
"ita"
],

@ -55,7 +55,7 @@
}
},
"EXTRA": {
"mongodb": "mongodb+srv://..",
"mongodb": "mongodb+srv://...",
"database": "StreamingCommunity"
}
}
server.py
@ -1,19 +1,30 @@
# 13.12.24

import os
import logging
#logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
import datetime
from urllib.parse import urlparse
from urllib.parse import urlparse, unquote
from urllib.parse import unquote
from typing import Optional


# External
import uvicorn
from rich.console import Console
from pymongo import MongoClient
from flask_cors import CORS
from fastapi import FastAPI, HTTPException, Query
from flask import Flask, jsonify, request
from fastapi.responses import FileResponse
from flask import send_from_directory
from fastapi.middleware.cors import CORSMiddleware


# Util
from StreamingCommunity.Util.os import os_summary
os_summary.get_system_summary()
from StreamingCommunity.Util.logger import Logger
log = Logger()
from StreamingCommunity.Util._jsonConfig import config_manager
from server_type import WatchlistItem, UpdateWatchlistItem
from server_util import updateUrl


# Internal
@ -23,19 +34,30 @@ from StreamingCommunity.Api.Site.streamingcommunity.film import download_film
from StreamingCommunity.Api.Site.streamingcommunity.series import download_video
from StreamingCommunity.Api.Site.streamingcommunity.util.ScrapeSerie import ScrapeSerie


# Player
from StreamingCommunity.Api.Player.vixcloud import VideoSource


# Variable
app = Flask(__name__)
app = FastAPI()
CORS(app)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"]
)


# Site variable
version, domain = get_version_and_domain()
season_name = None
scrape_serie = ScrapeSerie("streamingcommunity")
video_source = VideoSource("streamingcommunity", True)

DOWNLOAD_DIRECTORY = os.getcwd()
console = Console()


# Mongo variable
@ -47,179 +69,134 @@ downloads_collection = db['downloads']


# ---------- SITE API ------------
@app.route('/')
@app.get("/", summary="Health Check")
def index():
async def index():
"""
Health check endpoint to confirm server is operational.

Returns:
str: Operational status message
"""
logging.info("Health check endpoint accessed")
return 'Server is operational'
return "Server is operational"

@app.route('/api/search', methods=['GET'])
@app.get("/api/search")
def get_list_search():
async def get_list_search(q: Optional[str] = Query(None)):
"""
Search for titles based on query parameter.

Returns:
JSON response with search results or error message
"""
if not q:
logging.warning("Search request without query parameter")
raise HTTPException(status_code=400, detail="Missing query parameter")

try:
query = request.args.get('q')

if not query:
logging.warning("Search request without query parameter")
return jsonify({'error': 'Missing query parameter'}), 400

result = search_titles(query, domain)
logging.info(f"Search performed for query: {query}")
return jsonify(result), 200
result = search_titles(q, domain)
logging.info(f"Search performed for query: {q}")
return result

except Exception as e:
logging.error(f"Error in search: {str(e)}", exc_info=True)
return jsonify({'error': 'Internal server error'}), 500
raise HTTPException(status_code=500, detail="Internal server error")
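As a sanity check, a minimal Python client for the migrated search route could look like this (httpx is an assumption, not a project dependency shown in this diff; the query value is made up):

# Hedged sketch: exercise GET /api/search on the running server.
import httpx

resp = httpx.get("http://127.0.0.1:1234/api/search", params={"q": "dune"})
resp.raise_for_status()   # 400 if "q" is missing, 500 on internal errors
print(resp.json())        # FastAPI serializes the returned result to JSON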
@app.route('/api/getInfo', methods=['GET'])
@app.get("/api/getInfo")
def get_info_title():
async def get_info_title(url: Optional[str] = Query(None)):
"""
Retrieve information for a specific title.

Returns:
JSON response with title information or error message
"""
if not url or "http" not in url:
logging.warning("GetInfo request without URL parameter")
raise HTTPException(status_code=400, detail="Missing URL parameter")

try:
title_url = request.args.get('url')

if not title_url:
logging.warning("GetInfo request without URL parameter")
return jsonify({'error': 'Missing URL parameter'}), 400

result = get_infoSelectTitle(title_url, domain, version)
result = get_infoSelectTitle(url, domain, version)

if result.get('type') == "tv":
global season_name, scrape_serie, video_source

season_name = result.get('slug')

scrape_serie.setup(
version=version,
media_id=int(result.get('id')),
series_name=result.get('slug')
)

video_source.setup(result.get('id'))

logging.info(f"TV series info retrieved: {season_name}")

return jsonify(result), 200
return result

except Exception as e:
logging.error(f"Error retrieving title info: {str(e)}", exc_info=True)
return jsonify({'error': 'Failed to retrieve title information'}), 500

@app.route('/api/getInfoSeason', methods=['GET'])
@app.get("/api/getInfoSeason")
def get_info_season():
async def get_info_season(url: Optional[str] = Query(None), n: Optional[str] = Query(None)):
"""
Retrieve season information for a specific title.

Returns:
JSON response with season information or error message
"""
if not url or not n:
logging.warning("GetInfoSeason request with missing parameters")
raise HTTPException(status_code=400, detail="Missing URL or season number")

try:
title_url = request.args.get('url')
number_season = request.args.get('n')

if not title_url or not number_season:
logging.warning("GetInfoSeason request with missing parameters")
return jsonify({'error': 'Missing URL or season number'}), 400

result = get_infoSelectSeason(title_url, number_season, domain, version)
logging.info(f"Season info retrieved for season {number_season}")
return jsonify(result), 200
result = get_infoSelectSeason(url, n, domain, version)
logging.info(f"Season info retrieved for season {n}")
return result

except Exception as e:
logging.error(f"Error retrieving season info: {str(e)}", exc_info=True)
return jsonify({'error': 'Failed to retrieve season information'}), 500
raise HTTPException(status_code=500, detail="Failed to retrieve season information")

@app.route('/api/getdomain', methods=['GET'])
@app.get("/api/getdomain")
def get_domain():
async def get_domain():
"""
Retrieve current domain and version.

Returns:
JSON response with domain and version
"""
try:
global version, domain
version, domain = get_version_and_domain()
logging.info(f"Domain retrieved: {domain}, Version: {version}")
return jsonify({'domain': domain, 'version': version}), 200
return {"domain": domain, "version": version}

except Exception as e:
logging.error(f"Error retrieving domain: {str(e)}", exc_info=True)
return jsonify({'error': 'Failed to retrieve domain information'}), 500
raise HTTPException(status_code=500, detail="Failed to retrieve domain information")
# ---------- DOWNLOAD API ------------
@app.route('/downloadFilm', methods=['GET'])
@app.get("/api/download/film")
def call_download_film():
async def call_download_film(id: Optional[str] = Query(None), slug: Optional[str] = Query(None)):
"""
Download a film by its ID and slug.

Returns:
JSON response with download path or error message
"""
if not id or not slug:
logging.warning("Download film request with missing parameters")
raise HTTPException(status_code=400, detail="Missing film ID or slug")

try:
film_id = request.args.get('id')
slug = request.args.get('slug')

if not film_id or not slug:
logging.warning("Download film request with missing parameters")
return jsonify({'error': 'Missing film ID or slug'}), 400

item_media = MediaItem(**{'id': film_id, 'slug': slug})
item_media = MediaItem(**{'id': id, 'slug': slug})
path_download = download_film(item_media)

download_data = {
'type': 'movie',
'id': film_id,
'id': id,
'slug': slug,
'path': path_download,
'timestamp': datetime.datetime.now(datetime.timezone.utc)
}
downloads_collection.insert_one(download_data)

logging.info(f"Film downloaded: {slug}")
return jsonify({'path': path_download}), 200
return {"path": path_download}

except Exception as e:
logging.error(f"Error downloading film: {str(e)}", exc_info=True)
return jsonify({'error': 'Failed to download film'}), 500
raise HTTPException(status_code=500, detail="Failed to download film")

@app.route('/downloadEpisode', methods=['GET'])
@app.get("/api/download/episode")
def call_download_episode():
async def call_download_episode(n_s: Optional[int] = Query(None), n_ep: Optional[int] = Query(None), titleID: Optional[int] = Query(None), slug: Optional[str] = Query(None)):
"""
Download a specific TV series episode.

Returns:
JSON response with download path or error message
"""
global scrape_serie

if not n_s or not n_ep:
logging.warning("Download episode request with missing parameters")
raise HTTPException(status_code=400, detail="Missing season or episode number")

try:
season_number = request.args.get('n_s')
episode_number = request.args.get('n_ep')

if not season_number or not episode_number:
logging.warning("Download episode request with missing parameters")
return jsonify({'error': 'Missing season or episode number'}), 400

season_number = int(season_number)
episode_number = int(episode_number)

scrape_serie.collect_title_season(season_number)
scrape_serie.setup(
version=version,
media_id=int(titleID),
series_name=slug
)
video_source.setup(int(titleID))

scrape_serie.collect_info_title()
scrape_serie.collect_info_season(n_s)

path_download = download_video(
season_name,
season_number,
n_s,
episode_number,
n_ep,
scrape_serie,
video_source
)
@ -228,178 +205,151 @@ def call_download_episode():
'type': 'tv',
'id': scrape_serie.media_id,
'slug': scrape_serie.series_name,
'n_s': season_number,
'n_s': n_s,
'n_ep': episode_number,
'n_ep': n_ep,
'path': path_download,
'timestamp': datetime.datetime.now(datetime.timezone.utc)
}

downloads_collection.insert_one(download_data)

logging.info(f"Episode downloaded: S{season_number}E{episode_number}")
logging.info(f"Episode downloaded: S{n_s}E{n_ep}")
return jsonify({'path': path_download}), 200
return {"path": path_download}

except ValueError:
logging.error("Invalid season or episode number format")
return jsonify({'error': 'Invalid season or episode number'}), 400

except Exception as e:
logging.error(f"Error downloading episode: {str(e)}", exc_info=True)
return jsonify({'error': 'Failed to download episode'}), 500
raise HTTPException(status_code=500, detail="Failed to download episode")
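Note that the new handler takes titleID and slug so it can rebuild the scrape_serie state per request instead of relying on a prior /api/getInfo call. A hypothetical client call (httpx assumed, IDs invented):

# Hedged sketch: trigger an episode download via the new route.
import httpx

params = {"n_s": 1, "n_ep": 3, "titleID": 50636, "slug": "some-series"}
resp = httpx.get("http://127.0.0.1:1234/api/download/episode", params=params, timeout=None)
print(resp.json()["path"])   # the handler returns {"path": <downloaded file>}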
@app.route('/downloaded/<path:filename>', methods=['GET'])
@app.get("/api/downloaded/{filename:path}")
def serve_downloaded_file(filename):
async def serve_downloaded_file(filename: str):
"""
Serve downloaded files with proper URL decoding and error handling.

Returns:
Downloaded file or error message
"""
try:
# URL decode the filename
# Decode the file name
decoded_filename = unquote(filename)
logging.debug(f"Requested file: {decoded_filename}")
logging.info(f"Decoded filename: {decoded_filename}")

# Construct full file path
file_path = os.path.join(DOWNLOAD_DIRECTORY, decoded_filename)
logging.debug(f"Full file path: {file_path}")
# Normalize the path
file_path = os.path.normpath(os.path.join(DOWNLOAD_DIRECTORY, decoded_filename))

# Verify the file is inside the download directory
if not file_path.startswith(os.path.abspath(DOWNLOAD_DIRECTORY)):
logging.error(f"Path traversal attempt detected: {file_path}")
raise HTTPException(status_code=400, detail="Invalid file path")

# Verify file exists
# Verify the file exists
if not os.path.isfile(file_path):
logging.warning(f"File not found: {decoded_filename}")
logging.error(f"File not found: {file_path}")
return jsonify({'error': 'File not found'}), 404
raise HTTPException(status_code=404, detail="File not found")

# Serve the file
# Return the file
return send_from_directory(DOWNLOAD_DIRECTORY, decoded_filename, as_attachment=False)
return FileResponse(file_path)

except Exception as e:
logging.error(f"Error serving file: {str(e)}", exc_info=True)
return jsonify({'error': 'Internal server error'}), 500
raise HTTPException(status_code=500, detail="Internal server error")
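The normpath/startswith pair above is what blocks ".." traversal, which Flask's send_from_directory previously handled internally. A standalone sketch of the same check (file names invented):

# Hedged sketch of the traversal guard used by serve_downloaded_file.
import os

base = os.path.abspath(os.getcwd())
for name in ("Video/Movie/film.mp4", "../etc/passwd"):
    candidate = os.path.normpath(os.path.join(base, name))
    print(name, "->", candidate.startswith(base))  # True = served, False = 400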
# ---------- WATCHLIST MONGO ------------
# ---------- WATCHLIST UTIL MONGO ------------
@app.route('/api/addWatchlist', methods=['POST'])
@app.post("/server/watchlist/add")
def add_to_watchlist():
async def add_to_watchlist(item: WatchlistItem):
title_name = request.json.get('name')
title_url = request.json.get('url')
season = request.json.get('season')

if title_url and season:
existing_item = watchlist_collection.find_one({'name': title_name, 'url': title_url, 'season': season})
if existing_item:
return jsonify({'message': 'Il titolo è già nella watchlist'}), 400

watchlist_collection.insert_one({
'name': title_name,
'title_url': title_url,
'season': season,
'added_on': datetime.datetime.utcnow()
})
return jsonify({'message': 'Titolo aggiunto alla watchlist'}), 200
else:
return jsonify({'message': 'Missing title_url or season'}), 400
existing_item = watchlist_collection.find_one({
'name': item.name,
'url': item.url,
'season': item.season
})

if existing_item:
logging.warning(f"Item already in watchlist: {item.name}")
raise HTTPException(status_code=400, detail="Il titolo è già nella watchlist")

watchlist_collection.insert_one({
'name': item.name,
'title_url': item.url,
'season': item.season,
'added_on': datetime.datetime.utcnow()
})

logging.info(f"Added to watchlist: {item.name}")
return {"message": "Titolo aggiunto alla watchlist"}

@app.route('/api/updateTitleWatchlist', methods=['POST'])
@app.post("/server/watchlist/update")
def update_title_watchlist():
async def update_title_watchlist(update: UpdateWatchlistItem):
print(request.json)
title_url = request.json.get('url')
new_season = request.json.get('season')

if title_url is not None and new_season is not None:
result = watchlist_collection.update_one(
{'title_url': title_url},
{'$set': {'season': new_season}}
)

if result.matched_count == 0:
return jsonify({'message': 'Titolo non trovato nella watchlist'}), 404

if result.modified_count == 0:
return jsonify({'message': 'La stagione non è cambiata'}), 200

return jsonify({'message': 'Stagione aggiornata con successo'}), 200
else:
return jsonify({'message': 'Missing title_url or season'}), 400
result = watchlist_collection.update_one(
{'title_url': update.url},
{'$set': {'season': update.season}}
)

if result.matched_count == 0:
logging.warning(f"Item not found for update: {update.url}")
raise HTTPException(status_code=404, detail="Titolo non trovato nella watchlist")

if result.modified_count == 0:
logging.info(f"Season unchanged for: {update.url}")
return {"message": "La stagione non è cambiata"}

logging.info(f"Updated season for: {update.url}")
return {"message": "Stagione aggiornata con successo"}

@app.route('/api/removeWatchlist', methods=['POST'])
@app.post("/server/watchlist/remove")
def remove_from_watchlist():
async def remove_from_watchlist(item: WatchlistItem):
title_name = request.json.get('name')

if title_name:
result = watchlist_collection.delete_one({'name': title_name})

if result.deleted_count == 1:
return jsonify({'message': 'Titolo rimosso dalla watchlist'}), 200
else:
return jsonify({'message': 'Titolo non trovato nella watchlist'}), 404
else:
return jsonify({'message': 'Missing title_url or season'}), 400
# You can handle just the 'name' field here
result = watchlist_collection.delete_one({'name': item.name})

if result.deleted_count == 0:
logging.warning(f"Item not found for removal: {item.name}")
raise HTTPException(status_code=404, detail="Titolo non trovato nella watchlist")

logging.info(f"Successfully removed from watchlist: {item.name}")
return {"message": "Titolo rimosso dalla watchlist"}

@app.route('/api/getWatchlist', methods=['GET'])
@app.get("/server/watchlist/get")
def get_watchlist():
async def get_watchlist():
watchlist_items = list(watchlist_collection.find({}, {'_id': 0}))

if watchlist_items:
return jsonify(watchlist_items), 200
else:
return jsonify({'message': 'La watchlist è vuota'}), 200
if not watchlist_items:
logging.info("Watchlist is empty")
return {"message": "La watchlist è vuota"}

logging.info("Watchlist retrieved")
return watchlist_items

@app.route('/api/checkWatchlist', methods=['GET'])
@app.get("/server/watchlist/check")
def get_newSeason():
async def get_new_season():
title_newSeasons = []
title_new_seasons = []
watchlist_items = list(watchlist_collection.find({}, {'_id': 0}))
logging.error("GET: ", watchlist_items)

if not watchlist_items:
return jsonify({'message': 'La watchlist è vuota'}), 200
logging.info("Watchlist is empty")
return {"message": "La watchlist è vuota"}

for item in watchlist_items:
title_url = item.get('title_url')
if not title_url:
continue

try:
parsed_url = urlparse(title_url)
hostname = parsed_url.hostname
domain_part = hostname.split('.')[1]
new_url = title_url.replace(domain_part, domain)
new_url = updateUrl(item['title_url'], domain)

result = get_infoSelectTitle(new_url, domain, version)

if not result or 'season_count' not in result:
continue

number_season = result.get("season_count")

if number_season > item.get("season"):
title_newSeasons.append({
title_new_seasons.append({
'title_url': item.get('title_url'),
'title_url': item['title_url'],
'name': item.get('name'),
'name': item['name'],
'season': int(number_season),
'season': number_season,
'nNewSeason': int(number_season) - int(item.get("season"))
'nNewSeason': number_season - item['season']
})

except Exception as e:
print(f"Errore nel recuperare informazioni per {item.get('title_url')}: {e}")
logging.error(f"Error checking new season for {item['title_url']}: {e}")

if title_newSeasons:
if title_new_seasons:
return jsonify(title_newSeasons), 200
logging.info(f"New seasons found: {len(title_new_seasons)}")
else:
return title_new_seasons
return jsonify({'message': 'Nessuna nuova stagione disponibile'}), 200

return {"message": "Nessuna nuova stagione disponibile"}
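Since these routes now parse a pydantic body instead of reading request.json by hand, clients must send JSON whose keys match WatchlistItem (defined in server_type.py below). A hypothetical call (httpx assumed, values invented):

# Hedged sketch: add a title, then fetch the list back.
import httpx

base = "http://127.0.0.1:1234/server/watchlist"
item = {"name": "some-series", "url": "https://streamingcommunity.example/titles/1-some-series", "season": 2}
print(httpx.post(f"{base}/add", json=item).json())   # 400 if it is already listed
print(httpx.get(f"{base}/get").json())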
# ---------- DOWNLOAD MONGO ------------
# ---------- DOWNLOAD UTIL MONGO ------------
def ensure_collections_exist(db):
"""
Ensures that the required collections exist in the database.
If they do not exist, they are created.

Args:
db: The MongoDB database object.
"""
required_collections = ['watchlist', 'downloads']
existing_collections = db.list_collection_names()

@ -411,190 +361,122 @@ def ensure_collections_exist(db):
else:
logging.info(f"Collection already exists: {collection_name}")

@app.route('/downloads', methods=['GET'])
@app.get("/server/path/get")
def fetch_all_downloads():
async def fetch_all_downloads():
"""
Endpoint to fetch all downloads.
"""
try:
downloads = list(downloads_collection.find({}, {'_id': 0}))
return jsonify(downloads), 200
logging.info("Downloads retrieved")
return downloads

except Exception as e:
logging.error(f"Error fetching all downloads: {str(e)}")
logging.error(f"Error fetching downloads: {e}")
return []
raise HTTPException(status_code=500, detail="Errore nel recupero dei download")
@app.get("/server/path/movie")
|
||||||
|
async def fetch_movie_path(id: Optional[int] = Query(None)):
|
||||||
|
if not id:
|
||||||
|
logging.warning("Movie path request without ID parameter")
|
||||||
|
raise HTTPException(status_code=400, detail="Missing movie ID")
|
||||||
|
|
||||||
@app.route('/deleteEpisode', methods=['DELETE'])
|
|
||||||
def remove_episode():
|
|
||||||
"""
|
|
||||||
Endpoint to delete a specific episode and its file.
|
|
||||||
"""
|
|
||||||
try:
|
try:
|
||||||
series_id = request.args.get('id')
|
movie = downloads_collection.find_one(
|
||||||
season_number = request.args.get('season')
|
{'type': 'movie', 'id': id},
|
||||||
episode_number = request.args.get('episode')
|
{'_id': 0, 'path': 1}
|
||||||
|
)
|
||||||
if not series_id or not season_number or not episode_number:
|
|
||||||
return jsonify({'error': 'Missing parameters (id, season, episode)'}), 400
|
|
||||||
|
|
||||||
try:
|
|
||||||
series_id = int(series_id)
|
|
||||||
season_number = int(season_number)
|
|
||||||
episode_number = int(episode_number)
|
|
||||||
except ValueError:
|
|
||||||
return jsonify({'error': 'Invalid season or episode number'}), 400
|
|
||||||
|
|
||||||
# Trova il percorso del file
|
|
||||||
episode = downloads_collection.find_one({
|
|
||||||
'type': 'tv',
|
|
||||||
'id': series_id,
|
|
||||||
'n_s': season_number,
|
|
||||||
'n_ep': episode_number
|
|
||||||
}, {'_id': 0, 'path': 1})
|
|
||||||
|
|
||||||
if not episode or 'path' not in episode:
|
|
||||||
return jsonify({'error': 'Episode not found'}), 404
|
|
||||||
|
|
||||||
file_path = episode['path']
|
|
||||||
|
|
||||||
# Elimina il file fisico
|
|
||||||
try:
|
|
||||||
if os.path.exists(file_path):
|
|
||||||
os.remove(file_path)
|
|
||||||
logging.info(f"Deleted episode file: {file_path}")
|
|
||||||
else:
|
|
||||||
logging.warning(f"Episode file not found: {file_path}")
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Error deleting episode file: {str(e)}")
|
|
||||||
|
|
||||||
# Rimuovi l'episodio dal database
|
|
||||||
result = downloads_collection.delete_one({
|
|
||||||
'type': 'tv',
|
|
||||||
'id': series_id,
|
|
||||||
'n_s': season_number,
|
|
||||||
'n_ep': episode_number
|
|
||||||
})
|
|
||||||
|
|
||||||
if result.deleted_count > 0:
|
|
||||||
return jsonify({'success': True}), 200
|
|
||||||
else:
|
|
||||||
return jsonify({'error': 'Failed to delete episode from database'}), 500
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Error deleting episode: {str(e)}")
|
|
||||||
return jsonify({'error': 'Failed to delete episode'}), 500
|
|
||||||
|
|
||||||
@app.route('/deleteMovie', methods=['DELETE'])
|
|
||||||
def remove_movie():
|
|
||||||
"""
|
|
||||||
Endpoint to delete a specific movie, its file, and its parent folder if empty.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
movie_id = request.args.get('id')
|
|
||||||
|
|
||||||
if not movie_id:
|
|
||||||
return jsonify({'error': 'Missing movie ID'}), 400
|
|
||||||
|
|
||||||
# Trova il percorso del file
|
|
||||||
movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})
|
|
||||||
|
|
||||||
if not movie or 'path' not in movie:
|
|
||||||
return jsonify({'error': 'Movie not found'}), 404
|
|
||||||
|
|
||||||
file_path = movie['path']
|
|
||||||
parent_folder = os.path.dirname(file_path)
|
|
||||||
|
|
||||||
# Elimina il file fisico
|
|
||||||
try:
|
|
||||||
if os.path.exists(file_path):
|
|
||||||
os.remove(file_path)
|
|
||||||
logging.info(f"Deleted movie file: {file_path}")
|
|
||||||
else:
|
|
||||||
logging.warning(f"Movie file not found: {file_path}")
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Error deleting movie file: {str(e)}")
|
|
||||||
|
|
||||||
# Elimina la cartella superiore se vuota
|
|
||||||
try:
|
|
||||||
if os.path.exists(parent_folder) and not os.listdir(parent_folder):
|
|
||||||
os.rmdir(parent_folder)
|
|
||||||
logging.info(f"Deleted empty parent folder: {parent_folder}")
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Error deleting parent folder: {str(e)}")
|
|
||||||
|
|
||||||
# Rimuovi il film dal database
|
|
||||||
result = downloads_collection.delete_one({'type': 'movie', 'id': movie_id})
|
|
||||||
|
|
||||||
if result.deleted_count > 0:
|
|
||||||
return jsonify({'success': True}), 200
|
|
||||||
else:
|
|
||||||
return jsonify({'error': 'Failed to delete movie from database'}), 500
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"Error deleting movie: {str(e)}")
|
|
||||||
return jsonify({'error': 'Failed to delete movie'}), 500
|
|
||||||
|
|
||||||
@app.route('/moviePath', methods=['GET'])
|
|
||||||
def fetch_movie_path():
|
|
||||||
"""
|
|
||||||
Endpoint to fetch the path of a specific movie.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
movie_id = int(request.args.get('id'))
|
|
||||||
|
|
||||||
if not movie_id:
|
|
||||||
return jsonify({'error': 'Missing movie ID'}), 400
|
|
||||||
|
|
||||||
movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})
|
|
||||||
|
|
||||||
if movie and 'path' in movie:
|
if movie and 'path' in movie:
|
||||||
return jsonify({'path': movie['path']}), 200
|
logging.info(f"Movie path retrieved: {movie['path']}")
|
||||||
|
return {"path": movie['path']}
|
||||||
|
|
||||||
else:
|
else:
|
||||||
return jsonify({'error': 'Movie not found'}), 404
|
logging.warning(f"Movie not found: ID {id}")
|
||||||
|
raise HTTPException(status_code=404, detail="Movie not found")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(f"Error fetching movie path: {str(e)}")
|
logging.error(f"Error fetching movie path: {str(e)}", exc_info=True)
|
||||||
return jsonify({'error': 'Failed to fetch movie path'}), 500
|
raise HTTPException(status_code=500, detail="Failed to fetch movie path")
|
||||||
|
|
||||||
|
@app.get("/server/path/episode")
|
||||||
|
async def fetch_episode_path(id: Optional[int] = Query(None), season: Optional[int] = Query(None), episode: Optional[int] = Query(None)):
|
||||||
|
if not id or not season or not episode:
|
||||||
|
logging.warning("Episode path request with missing parameters")
|
||||||
|
raise HTTPException(status_code=400, detail="Missing parameters (id, season, episode)")
|
||||||
|
|
||||||
@app.route('/episodePath', methods=['GET'])
|
|
||||||
def fetch_episode_path():
|
|
||||||
"""
|
|
||||||
Endpoint to fetch the path of a specific episode.
|
|
||||||
"""
|
|
||||||
try:
|
try:
|
||||||
series_id = request.args.get('id')
|
episode_data = downloads_collection.find_one(
|
||||||
season_number = request.args.get('season')
|
{'type': 'tv', 'id': id, 'n_s': season, 'n_ep': episode},
|
||||||
episode_number = request.args.get('episode')
|
{'_id': 0, 'path': 1}
|
||||||
|
)
|
||||||
|
|
||||||
if not series_id or not season_number or not episode_number:
|
if episode_data and 'path' in episode_data:
|
||||||
return jsonify({'error': 'Missing parameters (id, season, episode)'}), 400
|
logging.info(f"Episode path retrieved: {episode_data['path']}")
|
||||||
|
return {"path": episode_data['path']}
|
||||||
try:
|
|
||||||
series_id = int(series_id)
|
|
||||||
season_number = int(season_number)
|
|
||||||
episode_number = int(episode_number)
|
|
||||||
except ValueError:
|
|
||||||
return jsonify({'error': 'Invalid season or episode number'}), 400
|
|
||||||
|
|
||||||
episode = downloads_collection.find_one({
|
|
||||||
'type': 'tv',
|
|
||||||
'id': series_id,
|
|
||||||
'n_s': season_number,
|
|
||||||
'n_ep': episode_number
|
|
||||||
}, {'_id': 0, 'path': 1})
|
|
||||||
|
|
||||||
if episode and 'path' in episode:
|
|
||||||
return jsonify({'path': episode['path']}), 200
|
|
||||||
else:
|
else:
|
||||||
return jsonify({'error': 'Episode not found'}), 404
|
logging.warning(f"Episode not found: ID {id}, Season {season}, Episode {episode}")
|
||||||
|
raise HTTPException(status_code=404, detail="Episode not found")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(f"Error fetching episode path: {str(e)}")
|
logging.error(f"Error fetching episode path: {str(e)}", exc_info=True)
|
||||||
return jsonify({'error': 'Failed to fetch episode path'}), 500
|
raise HTTPException(status_code=500, detail="Failed to fetch episode path")
|
||||||
|
|
||||||
|
@app.delete("/server/delete/episode")
|
||||||
|
async def remove_episode(series_id: int = Query(...), season_number: int = Query(...), episode_number: int = Query(...)):
|
||||||
|
episode = downloads_collection.find_one({
|
||||||
|
'type': 'tv',
|
||||||
|
'id': series_id,
|
||||||
|
'n_s': season_number,
|
||||||
|
'n_ep': episode_number
|
||||||
|
}, {'_id': 0, 'path': 1})
|
||||||
|
|
||||||
|
if not episode:
|
||||||
|
logging.warning(f"Episode not found: S{season_number}E{episode_number}")
|
||||||
|
raise HTTPException(status_code=404, detail="Episodio non trovato")
|
||||||
|
|
||||||
|
file_path = episode.get('path')
|
||||||
|
if os.path.exists(file_path):
|
||||||
|
os.remove(file_path)
|
||||||
|
logging.info(f"Episode file deleted: {file_path}")
|
||||||
|
|
||||||
|
downloads_collection.delete_one({
|
||||||
|
'type': 'tv',
|
||||||
|
'id': series_id,
|
||||||
|
'n_s': season_number,
|
||||||
|
'n_ep': episode_number
|
||||||
|
})
|
||||||
|
|
||||||
|
return {"success": True}
|
||||||
|
|
||||||
|
@app.delete("/server/delete/movie")
|
||||||
|
async def remove_movie(movie_id: int = Query(...)):
|
||||||
|
movie = downloads_collection.find_one({'type': 'movie', 'id': movie_id}, {'_id': 0, 'path': 1})
|
||||||
|
|
||||||
|
if not movie:
|
||||||
|
logging.warning(f"Movie not found: ID {movie_id}")
|
||||||
|
raise HTTPException(status_code=404, detail="Film non trovato")
|
||||||
|
|
||||||
|
file_path = movie.get('path')
|
||||||
|
parent_folder = os.path.dirname(file_path)
|
||||||
|
|
||||||
|
if os.path.exists(file_path):
|
||||||
|
os.remove(file_path)
|
||||||
|
logging.info(f"Movie file deleted: {file_path}")
|
||||||
|
|
||||||
|
if os.path.exists(parent_folder) and not os.listdir(parent_folder):
|
||||||
|
os.rmdir(parent_folder)
|
||||||
|
logging.info(f"Parent folder deleted: {parent_folder}")
|
||||||
|
|
||||||
|
downloads_collection.delete_one({'type': 'movie', 'id': movie_id})
|
||||||
|
return {"success": True}
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
ensure_collections_exist(db)
|
ensure_collections_exist(db)
|
||||||
app.run(debug=True, port=1234, threaded=True)
|
uvicorn.run(
|
||||||
|
"server:app",
|
||||||
|
host="127.0.0.1",
|
||||||
|
port=1234,
|
||||||
|
reload=False
|
||||||
|
)
|
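One detail worth flagging: the delete routes use Query(...), which makes the parameter required (FastAPI rejects the request with a 422 before the handler runs), while the routes above use Optional[...] = Query(None) and raise 400 by hand. A minimal standalone sketch of the difference (not part of server.py):

# Hedged sketch: required vs optional query parameters in FastAPI.
from typing import Optional
from fastapi import FastAPI, Query

demo = FastAPI()

@demo.get("/required")
async def required(x: int = Query(...)):             # missing "x" -> automatic 422
    return {"x": x}

@demo.get("/optional")
async def optional(x: Optional[int] = Query(None)):  # missing "x" -> x is None
    return {"x": x}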
server_type.py (new file)
@ -0,0 +1,23 @@
# 13.12.24

import datetime
from pydantic import BaseModel
from typing import Optional, Dict, List


class WatchlistItem(BaseModel):
name: str
url: Optional[str] = None
title_url: Optional[str] = None
season: Optional[int] = None
added_on: Optional[datetime.datetime] = None

class UpdateWatchlistItem(BaseModel):
url: str = None
season: int = None

class DownloadRequest(BaseModel):
id: str
slug: Optional[str] = None
season: Optional[int] = None
episode: Optional[int] = None
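For reference, FastAPI materializes the POST body into these models automatically; the same validation can be reproduced directly (field values invented):

# Hedged sketch: WatchlistItem validation outside the request cycle.
from server_type import WatchlistItem

item = WatchlistItem(**{"name": "some-series", "season": 2})
print(item.name, item.season, item.url)   # url defaults to None
# A missing "name" would raise pydantic.ValidationError, which FastAPI
# surfaces to the client as a 422 response.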
server_util.py (new file)
@ -0,0 +1,13 @@
# 13.12.24

from urllib.parse import urlparse, unquote


def updateUrl(oldUlr: str, domain: str):

parsed_url = urlparse(oldUlr)
hostname = parsed_url.hostname
domain_part = hostname.split('.')[1]
new_url = oldUlr.replace(domain_part, domain)

return new_url
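updateUrl swaps the second hostname label (the part of the domain that StreamingCommunity rotates) for the freshly resolved one. A quick example of its behavior (hostnames invented):

# Hedged sketch of updateUrl's behavior.
from server_util import updateUrl

old = "https://streamingcommunity.example/titles/1-some-series"
print(updateUrl(old, "newtld"))
# -> "https://streamingcommunity.newtld/titles/1-some-series"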