Mirror of https://github.com/Arrowar/StreamingCommunity.git, synced 2025-07-19 00:20:00 +00:00
Delete .site

This commit is contained in:
  parent c59502c1fd
  commit d51665f5ac
.github/workflows/pages.yml (vendored): 2 changed lines
@@ -42,4 +42,4 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
         uses: actions/deploy-pages@v4
@@ -1,8 +1,11 @@
 # 10.12.23
 
+import json
+
 
 # External libraries
 import httpx
+from bs4 import BeautifulSoup
 from rich.console import Console
 
 
@@ -41,16 +44,34 @@ def title_search(query: str) -> int:
     media_search_manager.clear()
     table_show_manager.clear()
 
-    search_url = f"{site_constant.FULL_URL}/api/search?q={query}"
+    try:
+        response = httpx.get(
+            site_constant.FULL_URL,
+            headers={'user-agent': get_userAgent()},
+            timeout=max_timeout
+        )
+        response.raise_for_status()
+
+        soup = BeautifulSoup(response.text, 'html.parser')
+        version = json.loads(soup.find('div', {'id': "app"}).get("data-page"))['version']
+
+    except Exception as e:
+        console.print(f"[red]Site: {site_constant.SITE_NAME} version, request error: {e}")
+        return 0
+
+    search_url = f"{site_constant.FULL_URL}/search?q={query}"
     console.print(f"[cyan]Search url: [yellow]{search_url}")
 
     try:
         response = httpx.get(
             search_url,
-            headers={'user-agent': get_userAgent()},
-            timeout=max_timeout,
-            follow_redirects=True,
-            verify=False
+            headers = {
+                'referer': site_constant.FULL_URL,
+                'user-agent': get_userAgent(),
+                'x-inertia': 'true',
+                'x-inertia-version': version
+            },
+            timeout=max_timeout
         )
         response.raise_for_status()
 
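The block added above performs an Inertia.js handshake: it fetches the landing page, reads the version field from the JSON stored in the #app element's data-page attribute, and replays that value in the x-inertia / x-inertia-version headers so the search endpoint answers with JSON instead of HTML. A standalone sketch of that flow follows; BASE_URL, the user-agent string, and the timeout are placeholder assumptions standing in for the project's site_constant.FULL_URL, get_userAgent() and max_timeout.

# Standalone sketch of the Inertia.js handshake used above (placeholder values, not project config).
import json

import httpx
from bs4 import BeautifulSoup

BASE_URL = "https://example.com"          # assumption: stands in for site_constant.FULL_URL
HEADERS = {"user-agent": "Mozilla/5.0"}   # assumption: stands in for get_userAgent()
TIMEOUT = 10                              # assumption: stands in for max_timeout


def get_inertia_version(base_url: str) -> str:
    """Read the Inertia 'version' field from the #app element's data-page JSON."""
    response = httpx.get(base_url, headers=HEADERS, timeout=TIMEOUT)
    response.raise_for_status()

    soup = BeautifulSoup(response.text, "html.parser")
    page_data = json.loads(soup.find("div", {"id": "app"}).get("data-page"))
    return page_data["version"]


def search(base_url: str, query: str) -> list:
    """Call /search with the x-inertia headers so the server replies with JSON."""
    version = get_inertia_version(base_url)
    response = httpx.get(
        f"{base_url}/search",
        params={"q": query},
        headers={
            **HEADERS,
            "referer": base_url,
            "x-inertia": "true",
            "x-inertia-version": version,
        },
        timeout=TIMEOUT,
    )
    response.raise_for_status()
    return response.json().get("props", {}).get("titles", [])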
@@ -66,7 +87,7 @@ def title_search(query: str) -> int:
 
     # Collect json data
     try:
-        data = response.json().get('data', [])
+        data = response.json().get('props').get('titles')
     except Exception as e:
         console.log(f"Error parsing JSON response: {e}")
         return 0
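Note that the new parsing line, response.json().get('props').get('titles'), raises AttributeError rather than returning None when the payload has no 'props' key, so the surrounding try/except is what turns an unexpected response into the return 0 path. A small defensive variant, offered only as a sketch and not as the project's code, avoids leaning on the exception handler:

# Hypothetical helper: chained .get() with dict defaults never raises on a missing key.
def extract_titles(payload: dict) -> list:
    return payload.get("props", {}).get("titles", []) or []

# usage (assuming an httpx.Response named response):
# data = extract_titles(response.json())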
|
@@ -96,17 +96,16 @@ class GetSerieInfo:
         if not season:
             logging.error(f"Season {number_season} not found")
             return
 
         response = httpx.get(
-            url=f'{self.url}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}',
+            url=f'{self.url}/titles/{self.media_id}-{self.series_name}/season-{number_season}',
             headers={
-                'User-Agent': get_userAgent(),
+                'User-Agent': self.headers['user-agent'],
                 'x-inertia': 'true',
                 'x-inertia-version': self.version,
             },
             timeout=max_timeout
         )
-        response.raise_for_status()
 
         # Extract episodes from JSON response
         json_response = response.json().get('props', {}).get('loadedSeason', {}).get('episodes', [])
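The season request in GetSerieInfo follows the same pattern: the path now uses season-{number_season} instead of stagione-{number_season}, the stored Inertia version is sent back in the headers, and the episode list is read from props.loadedSeason.episodes. A minimal standalone sketch, with BASE_URL, MEDIA_ID, SLUG and INERTIA_VERSION as placeholder assumptions rather than the class's real attributes:

# Sketch of the season request above; all constants are placeholder assumptions.
import httpx

BASE_URL = "https://example.com"     # assumption: stands in for self.url
MEDIA_ID = 1234                      # assumption: stands in for self.media_id
SLUG = "some-series"                 # assumption: stands in for self.series_name
INERTIA_VERSION = "abc123"           # assumption: obtained from the data-page handshake
TIMEOUT = 10


def get_season_episodes(number_season: int) -> list:
    """Fetch one season page as Inertia JSON and return its episode dicts."""
    response = httpx.get(
        url=f"{BASE_URL}/titles/{MEDIA_ID}-{SLUG}/season-{number_season}",
        headers={
            "User-Agent": "Mozilla/5.0",
            "x-inertia": "true",
            "x-inertia-version": INERTIA_VERSION,
        },
        timeout=TIMEOUT,
    )
    return response.json().get("props", {}).get("loadedSeason", {}).get("episodes", [])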
|