Mirror of https://github.com/Arrowar/StreamingCommunity.git
Synced 2025-06-07 20:15:24 +00:00

Commit: Restore ...

This commit is contained in:
parent 40328fe99f
commit 43f0d45fa9
@@ -99,7 +99,8 @@ You can change some behaviors by tweaking the configuration file.
 * **verify_ssl**: Whether to verify SSL certificates.
 - **Default Value**: `false`

-* **proxy**: To use proxy create a file with name list_proxy.txt and copy ip and port like "122.114.232.137:8080". They need to be http
+* **proxy**: The proxy to use for requests. (Note: This parameter works only with HTTP and HTTPS protocols.)
+- **Example Value**: `["http://user:pass@38.154.227.167:5868"]`

 </details>

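The new `proxy` key replaces the old `list_proxy.txt` workflow: proxies now live as a JSON list under `REQUESTS` in `config.json`. A minimal sketch of how one entry of that list can be consumed with `requests` (the config layout and test URL are assumptions based on the README keys above):

    import json
    import requests

    # Assumed config.json fragment, mirroring the README keys above:
    # {"REQUESTS": {"verify_ssl": false, "proxy": ["http://user:pass@38.154.227.167:5868"]}}
    with open("config.json") as f:
        proxy_list = json.load(f)["REQUESTS"]["proxy"]

    # requests keys the proxies mapping by plain scheme name
    proxies = {"http": proxy_list[0], "https": proxy_list[0]}
    response = requests.get("https://example.com", proxies=proxies, timeout=10)
    print(response.status_code)
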
@@ -6,7 +6,7 @@ import logging


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup


@@ -47,12 +47,12 @@ class VideoSource:
 """

 try:
-response = httpx.get(url, headers=self.headers, follow_redirects=True)
+response = requests.get(url, headers=self.headers)
 response.raise_for_status()
 return response.text

 except Exception as e:
-logging.error(f"Request failed [supervideo]: {e}")
+logging.error(f"Request failed: {e}")
 return None

 def parse_html(self, html_content: str) -> BeautifulSoup:

@@ -5,7 +5,7 @@ import logging


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup
 from unidecode import unidecode

@@ -44,7 +44,7 @@ def title_search(title_search: str) -> int:
 """

 # Send request to search for titles
-response = httpx.get(f"https://{SITE_NAME}.{DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3", headers={'user-agent': get_headers()})
+response = requests.get(f"https://{SITE_NAME}.{DOMAIN_NOW}/page/1/?story={unidecode(title_search.replace(' ', '+'))}&do=search&subaction=search&titleonly=3", headers={'user-agent': get_headers()})
 response.raise_for_status()

 # Create soup and find table

@@ -6,7 +6,7 @@ from urllib.parse import urljoin, urlparse, parse_qs, urlencode, urlunparse


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup


@@ -60,7 +60,7 @@ class VideoSource:
 """
 try:

-response = httpx.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/")
+response = requests.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/")
 response.raise_for_status()

 # Parse JSON response and return episode count

@@ -87,7 +87,7 @@ class VideoSource:
 "end_range": index_ep + 1
 }

-response = httpx.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/{index_ep}", params = params)
+response = requests.get(f"https://www.{self.base_name}.{self.domain}/info_api/{self.media_id}/{index_ep}", params = params)
 response.raise_for_status()

 # Return information about the episode

@@ -110,7 +110,7 @@ class VideoSource:
 """
 try:

-response = httpx.get(f"https://www.{self.base_name}.{self.domain}/embed-url/{episode_id}")
+response = requests.get(f"https://www.{self.base_name}.{self.domain}/embed-url/{episode_id}")
 response.raise_for_status()

 # Extract and clean embed URL

@@ -118,7 +118,7 @@ class VideoSource:
 self.iframe_src = embed_url

 # Fetch video content using embed URL
-video_response = httpx.get(embed_url)
+video_response = requests.get(embed_url)
 video_response.raise_for_status()


@@ -6,7 +6,7 @@ import logging


 # External libraries
-import httpx
+import requests


 # Internal utilities

@@ -28,7 +28,7 @@ def check_url_for_content(url: str, content: str) -> bool:
 try:

 logging.info(f"Test site to extract domain: {url}")
-response = httpx.get(url, timeout = 1)
+response = requests.get(url, timeout = 1)
 response.raise_for_status()

 if content in response.text:

@@ -5,7 +5,7 @@ import logging


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup
 from unidecode import unidecode

@@ -45,7 +45,7 @@ def get_token(site_name: str, domain: str) -> dict:
 """

 # Send a GET request to the specified URL composed of the site name and domain
-response = httpx.get(f"https://www.{site_name}.{domain}")
+response = requests.get(f"https://www.{site_name}.{domain}")
 response.raise_for_status()

 # Initialize variables to store CSRF token

@@ -83,11 +83,11 @@ def update_domain():
 try:

 console.log(f"[cyan]Test site: [red]https://{SITE_NAME}.{DOMAIN_NOW}")
-response = httpx.get(f"https://www.{SITE_NAME}.{DOMAIN_NOW}")
+response = requests.get(f"https://www.{SITE_NAME}.{DOMAIN_NOW}")
 response.status_code

 # If the current site is inaccessible, try to obtain a new domain
-except Exception as e:
+except:

 # Get new domain
 console.print("[red]\nExtract new DOMAIN from TLD list.")

@@ -166,7 +166,7 @@ def title_search(title: str) -> int:
 }

 # Send a POST request to the API endpoint for live search
-response = httpx.post(f'https://www.{SITE_NAME}.{url_domain}/livesearch', cookies=cookies, headers=headers, json=json_data)
+response = requests.post(f'https://www.{SITE_NAME}.{url_domain}/livesearch', cookies=cookies, headers=headers, json_data=json_data)
 response.raise_for_status()

 # Process each record returned in the response

@@ -7,8 +7,9 @@ from urllib.parse import urlparse


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup
+from unidecode import unidecode


 # Internal utilities
@@ -40,10 +41,9 @@ def title_search() -> int:

 # Send request to search for titles
 try:
-response = httpx.get(url_search, headers={'user-agent': get_headers()}, cookies=cookie_index)
+response = requests.get(url_search, headers={'user-agent': get_headers()}, cookies=cookie_index)
 response.raise_for_status()
-
-except Exception as e:
+except:
 logging.error("Insert: {'ips4_IPSSessionFront': 'your_code', 'ips4_member_id': 'your_code'} in config file \ REQUESTS \ index, instead of user-agent. Use browser debug and cookie request with a valid account.")
 sys.exit(0)


@@ -54,8 +54,7 @@ def title_search() -> int:
 # Get url and filename
 try:
 mp4_link = souce.get("src")
-
-except Exception as e:
+except:
 logging.error("Insert: {'ips4_IPSSessionFront': 'your_code', 'ips4_member_id': 'your_code'} in config file \ REQUESTS \ index, instead of user-agent. Use browser debug and cookie request with a valid account.")
 sys.exit(0)

@@ -1,11 +1,12 @@
 # 26.05.24

+import re
 import sys
 import logging


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup


@@ -46,7 +47,7 @@ class VideoSource:
 """

 try:
-response = httpx.get(url, headers=self.headers)
+response = requests.get(url, headers=self.headers)
 response.raise_for_status()

 with open('index.html', 'w', encoding='utf-8') as file:

@@ -7,7 +7,7 @@ from urllib.parse import urlparse


 # External libraries
-import httpx
+import requests


 # Internal utilities

@@ -36,7 +36,7 @@ def title_search() -> int:
 url_search = msg.ask(f"[cyan]Insert url title")

 # Send request to search for titles
-response = httpx.get(url_search, headers={'user-agent': get_headers()})
+response = requests.get(url_search, headers={'user-agent': get_headers()})
 response.raise_for_status()

 # Get playlist

@@ -6,7 +6,7 @@ from urllib.parse import urljoin, urlparse, parse_qs, urlencode, urlunparse


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup


@@ -66,7 +66,7 @@ class VideoSource:

 try:

-response = httpx.get(f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}", headers=self.headers)
+response = requests.get(f"https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}", headers=self.headers)
 response.raise_for_status()

 # Extract JSON response if available

@@ -90,7 +90,7 @@ class VideoSource:
 try:

 # Make a request to collect information about a specific season
-response = httpx.get(f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}', headers=self.headers)
+response = requests.get(f'https://{self.base_name}.{self.domain}/titles/{self.media_id}-{self.series_name}/stagione-{number_season}', headers=self.headers)
 response.raise_for_status()

 # Extract JSON response if available

@@ -122,7 +122,7 @@ class VideoSource:
 try:

 # Make a request to get iframe source
-response = httpx.get(f"https://{self.base_name}.{self.domain}/iframe/{self.media_id}", params=params)
+response = requests.get(f"https://{self.base_name}.{self.domain}/iframe/{self.media_id}", params=params)
 response.raise_for_status()

 # Parse response with BeautifulSoup to get iframe source

@@ -164,15 +164,15 @@ class VideoSource:

 # Make a request to get content
 try:
-response = httpx.get(self.iframe_src, headers=self.headers)
+response = requests.get(self.iframe_src, headers=self.headers)
 response.raise_for_status()

-except Exception as e:
+except:
 print("\n")
 console.print(Panel("[red bold]Coming soon", title="Notification", title_align="left", border_style="yellow"))
 sys.exit(0)

-if response.status_code == 200:
+if response.ok:

 # Parse response with BeautifulSoup to get content
 soup = BeautifulSoup(response.text, "html.parser")

@@ -6,7 +6,7 @@ import logging


 # External library
-import httpx
+import requests


 # Internal utilities

@@ -28,7 +28,7 @@ def check_url_for_content(url: str, content: str) -> bool:
 try:

 logging.info(f"Test site to extract domain: {url}")
-response = httpx.get(url, timeout = 1)
+response = requests.get(url, timeout = 1)
 response.raise_for_status()

 if content in response.text:

@@ -8,7 +8,7 @@ from typing import Tuple


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup
 from unidecode import unidecode


@@ -102,7 +102,7 @@ def get_version_and_domain(new_domain = None) -> Tuple[str, str]:

 # Make requests to site to get text
 console.print(f"[cyan]Test site[white]: [red]https://{SITE_NAME}.{config_domain}")
-response = httpx.get(f"https://{SITE_NAME}.{config_domain}")
+response = requests.get(f"https://{SITE_NAME}.{config_domain}")
 console.print(f"[cyan]Test respost site[white]: [red]{response.status_code} \n")

 # Extract version from the response

@@ -137,7 +137,7 @@ def title_search(title_search: str, domain: str) -> int:
 """

 # Send request to search for titles ( replace à to a and space to "+" )
-response = httpx.get(f"https://{SITE_NAME}.{domain}/api/search?q={unidecode(title_search.replace(' ', '+'))}", headers={'user-agent': get_headers()})
+response = requests.get(f"https://{SITE_NAME}.{domain}/api/search?q={unidecode(title_search.replace(' ', '+'))}", headers={'user-agent': get_headers()})
 response.raise_for_status()

 # Add found titles to media search manager

@@ -8,7 +8,7 @@ from typing import Generator, Optional


 # External libraries
-import httpx
+import requests
 from bs4 import BeautifulSoup


@@ -80,7 +80,7 @@ def search(query: str, num: int = 10, stop: Optional[int] = None, pause: float =
 time.sleep(pause)

 # Fetch the HTML content of the search page
-html = httpx.get(url).text
+html = requests.get(url).text
 soup = BeautifulSoup(html, 'html.parser')

 try:

@@ -6,7 +6,7 @@ import logging


 # External libraries
-import httpx
+import requests
 from tqdm import tqdm


@@ -38,7 +38,7 @@ def MP4_downloader(url: str, path: str, referer: str, add_desc: str):

 # Make request to get content of video
 logging.info(f"Make request to fetch mp4 from: {url}")
-response = httpx.get(url, stream=True, headers={'Referer': referer, 'user-agent': get_headers()}, verify=REQUEST_VERIFY, timeout=REQUEST_TIMEOUT)
+response = requests.get(url, stream=True, headers={'Referer': referer, 'user-agent': get_headers()}, verify=REQUEST_VERIFY, timeout=REQUEST_TIMEOUT)
 total = int(response.headers.get('content-length', 0))

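The `stream=True` request plus `content-length` total above is the standard requests pattern for a progress-tracked download; the loop that usually completes it looks like this (URL, file name, and chunk size are illustrative, not taken from the repository):

    import requests
    from tqdm import tqdm

    response = requests.get("https://example.com/video.mp4", stream=True, timeout=10)
    total = int(response.headers.get("content-length", 0))

    with open("video.mp4", "wb") as f, tqdm(total=total, unit="B", unit_scale=True) as bar:
        # iter_content yields the body in chunks instead of buffering it all in memory
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
            bar.update(len(chunk))
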
@@ -8,7 +8,7 @@ from concurrent.futures import ThreadPoolExecutor


 # External libraries
-import httpx
+import requests
 from unidecode import unidecode


@@ -71,7 +71,7 @@ class Downloader():

 Args:
 - output_filename (str): Output filename for the downloaded content.
-- m3u8_playlist (str, optional): URL to the main M3U8 playlist.
+- m3u8_playlist (str, optional): URL to the main M3U8 playlist or text.
 - m3u8_playlist (str, optional): URL to the main M3U8 index. ( NOT TEXT )
 """

@@ -139,10 +139,9 @@ class Downloader():
 # Send a GET request to the provided URL
 logging.info(f"Test url: {url}")
 headers_index['user-agent'] = get_headers()
-response = httpx.get(url, headers=headers_index)
-response.raise_for_status()
+response = requests.get(url, headers=headers_index)

-if response.status_code == 200:
+if response.ok:
 return response.text

 else:

@@ -322,10 +321,9 @@ class Downloader():
 """

 # Send a GET request to download the subtitle content
-response = httpx.get(uri)
-response.raise_for_status()
+response = requests.get(uri)

-if response.status_code == 200:
+if response.ok:

 # Write the content to the specified file
 with open(path, "wb") as f:
@@ -370,7 +368,7 @@ class Downloader():
 m3u8_sub_parser = M3U8_Parser()
 m3u8_sub_parser.parse_data(
 uri = obj_subtitle.get('uri'),
-raw_content = httpx.get(obj_subtitle.get('uri')).text
+raw_content = requests.get(obj_subtitle.get('uri')).text
 )

 # Initiate the download of the subtitle content
@@ -502,15 +500,16 @@ class Downloader():
 if self.m3u8_playlist:
 logging.info("Download from PLAYLIST")

+# Fetch the M3U8 playlist content
+if not len(str(self.m3u8_playlist).split("\n")) > 2: # Is a single link
+m3u8_playlist_text = self.__df_make_req__(self.m3u8_playlist)

-m3u8_playlist_text = self.__df_make_req__(self.m3u8_playlist)
+# Add full URL of the M3U8 playlist to fix next .ts without https if necessary
+self.m3u8_url_fixer.set_playlist(self.m3u8_playlist) # !!!!!!!!!!!!!!!!!! to fix for playlist with text

-# Add full URL of the M3U8 playlist to fix next .ts without https if necessary
-self.m3u8_url_fixer.set_playlist(self.m3u8_playlist)
+else:
+logging.warning("M3U8 master url not set.") # TO DO
+m3u8_playlist_text = self.m3u8_playlist
-if m3u8_playlist_text is None:
-console.log("[red]Playlist m3u8 to download is empty.")
-sys.exit(0)

 # Save text playlist
 open(os.path.join(self.base_path, "tmp", "playlist.m3u8"), "w+").write(m3u8_playlist_text)
@@ -1,18 +1,16 @@
 # 09.06.24

-import os
 import time
 import logging
 from concurrent.futures import ThreadPoolExecutor


 # External libraries
-import httpx
+import requests


 # Internal utilities
 from Src.Util._jsonConfig import config_manager
-from Src.Util.os import check_file_existence


 class ProxyManager:
@@ -41,16 +39,15 @@ class ProxyManager:
 - Proxy string if working, None otherwise
 """
 protocol = proxy.split(":")[0].lower()
-protocol = f'{protocol}://'

 try:
-response = httpx.get(self.url, proxies={protocol: proxy}, timeout=self.timeout)
+response = requests.get(self.url, proxies={protocol: proxy}, timeout=self.timeout)

 if response.status_code == 200:
 logging.info(f"Proxy {proxy} is working.")
 return proxy

-except Exception as e:
+except requests.RequestException as e:
 logging.error(f"Proxy {proxy} failed: {e}")
 self.failed_proxies[proxy] = time.time()
 return None
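Dropping `protocol = f'{protocol}://'` here follows from an httpx/requests difference: httpx mounts proxies under URL-prefix keys such as "http://", while requests keys its `proxies` dict by the bare scheme. A standalone sketch of the check in the requests style (test URL and proxy value are placeholders):

    import requests

    proxy = "http://user:pass@38.154.227.167:5868"
    protocol = proxy.split(":")[0].lower()  # "http" -- used directly as the dict key

    try:
        response = requests.get("https://httpbin.org/ip", proxies={protocol: proxy}, timeout=5)
        print("working" if response.status_code == 200 else f"bad status {response.status_code}")
    except requests.RequestException as exc:
        print(f"failed: {exc}")
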
@@ -60,9 +57,8 @@ class ProxyManager:
 Verify all proxies in the list and store the working ones.
 """
 logging.info("Starting proxy verification...")
-with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
+with ThreadPoolExecutor(max_workers=10) as executor:
 self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))

 self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
 logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")

@@ -74,7 +70,6 @@ class ProxyManager:

 for proxy in self.verified_proxies:
 protocol = proxy.split(":")[0].lower()
-protocol = f'{protocol}://' # For httpx
 validate_proxy.append({protocol: proxy})

 return validate_proxy
@@ -82,32 +77,12 @@


 def main_test_proxy(url_test):

-path_file_proxt_list = "list_proxy.txt"
-
-if check_file_existence(path_file_proxt_list):
-
-# Write test to pass THERE IS PROXY on config.json for segments
-config_manager.set_key("REQUESTS", "proxy", ["192.168.1.1"])
-
-# Read file
-with open(path_file_proxt_list, 'r') as file:
-ip_addresses = file.readlines()
-
-# Formatt ip
-ip_addresses = [ip.strip() for ip in ip_addresses]
-formatted_ips = [f"http://{ip}" for ip in ip_addresses]
-
 # Get list of proxy from config.json
-proxy_list = formatted_ips
+proxy_list = config_manager.get_list('REQUESTS', 'proxy')

 # Verify proxy
 manager = ProxyManager(proxy_list, url_test)
 manager.verify_proxies()
-
-# Write valid ip in txt file
-with open(path_file_proxt_list, 'w') as file:
-for ip in ip_addresses:
-file.write(f"{ip}\n")

 # Return valid proxy
 return manager.get_verified_proxies()
@@ -13,7 +13,8 @@ from concurrent.futures import ThreadPoolExecutor


 # External libraries
-import httpx
+import requests
+from requests.exceptions import HTTPError, ConnectionError, Timeout, RequestException
 from tqdm import tqdm


@@ -22,7 +23,6 @@ from Src.Util.console import console
 from Src.Util.headers import get_headers
 from Src.Util.color import Colors
 from Src.Util._jsonConfig import config_manager
-from Src.Util.os import check_file_existence


 # Logic class

@@ -45,12 +45,14 @@ TQDM_MAX_WORKER = config_manager.get_int('M3U8_DOWNLOAD', 'tdqm_workers')
 TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
 TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
 REQUEST_TIMEOUT = config_manager.get_float('REQUESTS', 'timeout')
-THERE_IS_PROXY_LIST = check_file_existence("list_proxy.txt")
+THERE_IS_PROXY_LIST = len(config_manager.get_list('REQUESTS', 'proxy')) > 0


 # Variable
 headers_index = config_manager.get_dict('REQUESTS', 'index')
 headers_segments = config_manager.get_dict('REQUESTS', 'segments')
+session = requests.Session()
+session.verify = config_manager.get_bool('REQUESTS', 'verify_ssl')


 class M3U8_Segments:
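The module-level `requests.Session()` added here gives all segment downloads a shared connection pool, so repeated GETs against the same host can reuse TCP connections; `session.verify` applies the configured `verify_ssl` to every call. The same pattern in isolation (URL and header value are illustrative):

    import requests

    session = requests.Session()
    session.verify = True  # mirrors REQUESTS.verify_ssl from config.json
    session.headers.update({"user-agent": "Mozilla/5.0"})

    # Each get() reuses the session's pooled connection where possible
    for _ in range(3):
        response = session.get("https://example.com", timeout=10)
        print(response.status_code)
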
@@ -94,7 +96,7 @@ class M3U8_Segments:
 logging.info(f"Uri key: {key_uri}")

 try:
-response = httpx.get(key_uri, headers=headers_index)
+response = requests.get(key_uri, headers=headers_index)
 response.raise_for_status()

 except Exception as e:

@@ -167,11 +169,11 @@ class M3U8_Segments:
 headers_index['user-agent'] = get_headers()

 # Send a GET request to retrieve the index M3U8 file
-response = httpx.get(self.url, headers=headers_index)
+response = requests.get(self.url, headers=headers_index)
 response.raise_for_status()

 # Save the M3U8 file to the temporary folder
-if response.status_code == 200:
+if response.ok:
 path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
 open(path_m3u8_file, "w+").write(response.text)


@@ -197,9 +199,9 @@ class M3U8_Segments:
 if THERE_IS_PROXY_LIST:
 proxy = self.valid_proxy[index % len(self.valid_proxy)]
 logging.info(f"Use proxy: {proxy}")
-response = httpx.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy, verify=False)
+response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, proxies=proxy)
 else:
-response = httpx.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT, verify=False)
+response = session.get(ts_url, headers=headers_segments, timeout=REQUEST_TIMEOUT)

 # Get response content
 response.raise_for_status()
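The `index % len(self.valid_proxy)` expression rotates the verified proxies round-robin across segment indices. The selection logic in isolation (proxy entries are placeholders shaped like the dicts `get_verified_proxies()` returns):

    valid_proxy = [{"http": "http://10.0.0.1:8080"}, {"http": "http://10.0.0.2:8080"}]

    for index in range(5):
        proxy = valid_proxy[index % len(valid_proxy)]
        print(index, proxy)  # cycles 10.0.0.1, 10.0.0.2, 10.0.0.1, ...
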
@@ -218,6 +220,10 @@ class M3U8_Segments:
 self.queue.put((index, segment_content))
 progress_bar.update(1)

+except (HTTPError, ConnectionError, Timeout, RequestException) as e:
+progress_bar.update(1)
+logging.error(f"Request-related exception while downloading segment: {e}")
+
 except Exception as e:
 progress_bar.update(1)
 logging.error(f"An unexpected exception occurred while download segment: {e}")
@@ -274,30 +280,10 @@ class M3U8_Segments:
 writer_thread = threading.Thread(target=self.write_segments_to_file)
 writer_thread.start()

-# Ff proxy avaiable set max_workers to number of proxy
-# else set max_workers to TQDM_MAX_WORKER
-max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
-
-# if proxy avaiable set timeout to variable time
-# else set timeout to TDQM_DELAY_WORKER
-if THERE_IS_PROXY_LIST:
-num_proxies = len(self.valid_proxy)
-self.working_proxy_list = self.valid_proxy
-
-if num_proxies > 0:
-# calculate delay based on number of proxies
-# dalay should be between 0.5 and 1
-delay = max(0.5, min(1, 1 / (num_proxies + 1)))
-else:
-delay = TQDM_DELAY_WORKER
-
-else:
-delay = TQDM_DELAY_WORKER

 # Start all workers
-with ThreadPoolExecutor(max_workers=max_workers) as executor:
+with ThreadPoolExecutor(max_workers=TQDM_MAX_WORKER) as executor:
 for index, segment_url in enumerate(self.segments):
-time.sleep(delay)
+time.sleep(TQDM_DELAY_WORKER)
 executor.submit(self.make_requests_stream, segment_url, index, progress_bar)

 # Wait for all tasks to complete
@@ -1,6 +1,9 @@
-# 02.04.24
+# 09.06.24

-from .decryptor import M3U8_Decryption
-from .estimator import M3U8_Ts_Estimator
-from .parser import M3U8_Parser, M3U8_Codec
-from .url_fixer import M3U8_UrlFix
+from .helper import (
+M3U8_Decryption,
+M3U8_Ts_Estimator,
+M3U8_Parser,
+M3U8_Codec,
+M3U8_UrlFix
+)
Src/Lib/M3U8/helper/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+# 02.04.24
+
+from .decryptor import M3U8_Decryption
+from .estimator import M3U8_Ts_Estimator
+from .parser import M3U8_Parser, M3U8_Codec
+from .url_fixer import M3U8_UrlFix
@@ -1,15 +1,14 @@
 # 20.04.25

-import sys
 import logging


 # Internal utilities
-from m3u8 import loads
+from ..parser import load


 # External libraries
-import httpx
+import requests


 # Costant
@@ -373,7 +372,7 @@ class M3U8_Subtitle:

 # Send a request to retrieve the subtitle content
 logging.info(f"Download subtitle: {obj_subtitle.get('name')}")
-response_subitle = httpx.get(obj_subtitle.get('uri'))
+response_subitle = requests.get(obj_subtitle.get('uri'))

 try:
 # Try to extract the VTT URL from the subtitle content

@@ -419,9 +418,10 @@ class M3U8_Parser:


 # Get obj of the m3u8 text content download, dictionary with video, audio, segments, subtitles
-m3u8_obj = loads(raw_content, uri)
+m3u8_obj = load(raw_content, uri)

 self.__parse_video_info__(m3u8_obj)
+self.__parse_encryption_keys__(m3u8_obj)
 self.__parse_subtitles_and_audio__(m3u8_obj)
 self.__parse_segments__(m3u8_obj)

@@ -516,7 +516,6 @@ class M3U8_Parser:

 except Exception as e:
 logging.error(f"Error parsing encryption keys: {e}")
-sys.exit(0)
 pass

 def __parse_subtitles_and_audio__(self, m3u8_obj) -> None:
@@ -558,12 +557,8 @@ class M3U8_Parser:
 """

 try:

 for segment in m3u8_obj.segments:
-
-# Parse key
-self.__parse_encryption_keys__(segment)
-
 # Collect all index duration
 self.duration += segment.duration

Src/Lib/M3U8/parser/__init__.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+# 15.04.24
+
+import os
+
+
+# Internal utilities
+from .model import M3U8
+
+
+def load(raw_content, uri):
+"""
+Parses the content of an M3U8 playlist and returns an M3U8 object.
+
+Args:
+raw_content (str): The content of the M3U8 playlist as a string.
+uri (str): The URI of the M3U8 playlist file or stream.
+
+Returns:
+M3U8: An object representing the parsed M3U8 playlist.
+
+Raises:
+IOError: If the raw_content is empty or if the URI cannot be accessed.
+ValueError: If the raw_content is not a valid M3U8 playlist format.
+
+Example:
+>>> m3u8_content = "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:10\n#EXT-X-MEDIA-SEQUENCE:0\n#EXTINF:10.0,\nhttp://example.com/segment0.ts\n#EXTINF:10.0,\nhttp://example.com/segment1.ts\n"
+>>> uri = "http://example.com/playlist.m3u8"
+>>> playlist = load(m3u8_content, uri)
+"""
+
+if not raw_content:
+raise IOError("Empty content provided.")
+
+if not uri:
+raise IOError("Empty URI provided.")
+
+base_uri = os.path.dirname(uri)
+return M3U8(raw_content, base_uri=base_uri)
Src/Lib/M3U8/parser/_util.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+# 19.04.24
+
+import itertools
+
+
+def remove_quotes_parser(*attrs):
+"""
+Returns a dictionary mapping attribute names to a function that removes quotes from their values.
+"""
+return dict(zip(attrs, itertools.repeat(remove_quotes)))
+
+
+def remove_quotes(string):
+"""
+Removes quotes from a string.
+"""
+quotes = ('"', "'")
+if string and string[0] in quotes and string[-1] in quotes:
+return string[1:-1]
+return string
+
+
+def normalize_attribute(attribute):
+"""
+Normalizes an attribute name by converting hyphens to underscores and converting to lowercase.
+"""
+return attribute.replace('-', '_').lower().strip()
+
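These helpers are small enough to verify by hand; expected behavior, assuming the module imports as `Src.Lib.M3U8.parser._util`:

    from Src.Lib.M3U8.parser._util import (
        remove_quotes,
        remove_quotes_parser,
        normalize_attribute,
    )

    print(remove_quotes('"AES-128"'))         # AES-128
    print(remove_quotes("plain"))             # plain
    print(normalize_attribute("PROGRAM-ID"))  # program_id
    # Maps each attribute name to the remove_quotes function
    print(remove_quotes_parser("URI", "IV"))
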
Src/Lib/M3U8/parser/model.py (new file, 358 lines)
@@ -0,0 +1,358 @@
+# 15.04.24
+
+import os
+from collections import namedtuple
+
+
+# Internal utilities
+from ..parser import parser
+
+
+# Variable
+StreamInfo = namedtuple('StreamInfo', ['bandwidth', 'program_id', 'resolution', 'codecs'])
+Media = namedtuple('Media', ['uri', 'type', 'group_id', 'language', 'name','default', 'autoselect', 'forced', 'characteristics'])
+
+
+class M3U8:
+"""
+Represents a single M3U8 playlist. Should be instantiated with the content as string.
+
+Args:
+- content: the m3u8 content as string
+- base_path: all urls (key and segments url) will be updated with this base_path,
+ex: base_path = "http://videoserver.com/hls"
+- base_uri: uri the playlist comes from. it is propagated to SegmentList and Key
+ex: http://example.com/path/to
+
+Attribute:
+- key: it's a `Key` object, the EXT-X-KEY from m3u8. Or None
+- segments: a `SegmentList` object, represents the list of `Segment`s from this playlist
+- is_variant: Returns true if this M3U8 is a variant playlist, with links to other M3U8s with different bitrates.
+If true, `playlists` is a list of the playlists available, and `iframe_playlists` is a list of the i-frame playlists available.
+- is_endlist: Returns true if EXT-X-ENDLIST tag present in M3U8.
+Info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.8
+- playlists: If this is a variant playlist (`is_variant` is True), returns a list of Playlist objects
+- iframe_playlists: If this is a variant playlist (`is_variant` is True), returns a list of IFramePlaylist objects
+- playlist_type: A lower-case string representing the type of the playlist, which can be one of VOD (video on demand) or EVENT.
+- media: If this is a variant playlist (`is_variant` is True), returns a list of Media objects
+- target_duration: Returns the EXT-X-TARGETDURATION as an integer
+Info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.2
+- media_sequence: Returns the EXT-X-MEDIA-SEQUENCE as an integer
+Info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.3
+- program_date_time: Returns the EXT-X-PROGRAM-DATE-TIME as a string
+Info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.5
+- version: Return the EXT-X-VERSION as is
+- allow_cache: Return the EXT-X-ALLOW-CACHE as is
+- files: Returns an iterable with all files from playlist, in order. This includes segments and key uri, if present.
+- base_uri: It is a property (getter and setter) used by SegmentList and Key to have absolute URIs.
+- is_i_frames_only: Returns true if EXT-X-I-FRAMES-ONLY tag present in M3U8.
+Guide: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.12
+
+"""
+
+# Mapping of simple attributes (obj attribute, parser attribute)
+SIMPLE_ATTRIBUTES = (
+('is_variant', 'is_variant'),
+('is_endlist', 'is_endlist'),
+('is_i_frames_only', 'is_i_frames_only'),
+('target_duration', 'targetduration'),
+('media_sequence', 'media_sequence'),
+('program_date_time', 'program_date_time'),
+('version', 'version'),
+('allow_cache', 'allow_cache'),
+('playlist_type', 'playlist_type')
+)
+
+def __init__(self, content=None, base_path=None, base_uri=None):
+"""
+Initialize the M3U8 object.
+
+Parameters:
+- content: M3U8 content (string).
+- base_path: Base path for relative URIs (string).
+- base_uri: Base URI for absolute URIs (string).
+"""
+if content is not None:
+self.data = parser.parse(content)
+else:
+self.data = {}
+self._base_uri = base_uri
+self.base_path = base_path
+self._initialize_attributes()
+
+def _initialize_attributes(self):
+"""
+Initialize attributes based on parsed data.
+"""
+# Initialize key and segments
+self.key = Key(base_uri=self.base_uri, **self.data.get('key', {})) if 'key' in self.data else None
+self.segments = SegmentList([Segment(base_uri=self.base_uri, **params) for params in self.data.get('segments', [])])
+
+# Initialize simple attributes
+for attr, param in self.SIMPLE_ATTRIBUTES:
+setattr(self, attr, self.data.get(param))
+
+# Initialize files, media, playlists, and iframe_playlists
+self.files = []
+if self.key:
+self.files.append(self.key.uri)
+self.files.extend(self.segments.uri)
+
+self.media = [Media(
+uri = media.get('uri'),
+type = media.get('type'),
+group_id = media.get('group_id'),
+language = media.get('language'),
+name = media.get('name'),
+default = media.get('default'),
+autoselect = media.get('autoselect'),
+forced = media.get('forced'),
+characteristics = media.get('characteristics'))
+for media in self.data.get('media', [])
+]
+self.playlists = PlaylistList([Playlist(
+base_uri = self.base_uri,
+media = self.media,
+**playlist
+)for playlist in self.data.get('playlists', [])
+])
+self.iframe_playlists = PlaylistList()
+for ifr_pl in self.data.get('iframe_playlists', []):
+self.iframe_playlists.append(
+IFramePlaylist(
+base_uri = self.base_uri,
+uri = ifr_pl['uri'],
+iframe_stream_info=ifr_pl['iframe_stream_info'])
+)
+
+@property
+def base_uri(self):
+"""
+Get the base URI.
+"""
+return self._base_uri
+
+@base_uri.setter
+def base_uri(self, new_base_uri):
+"""
+Set the base URI.
+"""
+self._base_uri = new_base_uri
+self.segments.base_uri = new_base_uri
+
+
+class BasePathMixin:
+"""
+Mixin class for managing base paths.
+"""
+@property
+def base_path(self):
+"""
+Get the base path.
+"""
+return os.path.dirname(self.uri)
+
+@base_path.setter
+def base_path(self, newbase_path):
+"""
+Set the base path.
+"""
+if not self.base_path:
+self.uri = "%s/%s" % (newbase_path, self.uri)
+self.uri = self.uri.replace(self.base_path, newbase_path)
+
+
+class GroupedBasePathMixin:
+"""
+Mixin class for managing base paths across a group of items.
+"""
+
+def _set_base_uri(self, new_base_uri):
+"""
+Set the base URI for each item in the group.
+"""
+for item in self:
+item.base_uri = new_base_uri
+
+base_uri = property(None, _set_base_uri)
+
+def _set_base_path(self, new_base_path):
+"""
+Set the base path for each item in the group.
+"""
+for item in self:
+item.base_path = new_base_path
+
+base_path = property(None, _set_base_path)
+
+
+class Segment(BasePathMixin):
+"""
+Class representing a segment in an M3U8 playlist.
+Inherits from BasePathMixin for managing base paths.
+"""
+
+def __init__(self, uri, base_uri, program_date_time=None, duration=None,
+title=None, byterange=None, discontinuity=False, key=None):
+"""
+Initialize a Segment object.
+
+Args:
+- uri: URI of the segment.
+- base_uri: Base URI for the segment.
+- program_date_time: Returns the EXT-X-PROGRAM-DATE-TIME as a datetime
+Guide: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.5
+- duration: Duration of the segment (optional).
+- title: Title attribute from EXTINF parameter
+- byterange: Byterange information of the segment (optional).
+- discontinuity: Returns a boolean indicating if a EXT-X-DISCONTINUITY tag exists
+Guide: http://tools.ietf.org/html/draft-pantos-http-live-streaming-13#section-3.4.11
+- key: Key for encryption (optional).
+"""
+self.uri = uri
+self.duration = duration
+self.title = title
+self.base_uri = base_uri
+self.byterange = byterange
+self.program_date_time = program_date_time
+self.discontinuity = discontinuity
+#self.key = key
+
+
+class SegmentList(list, GroupedBasePathMixin):
+"""
+Class representing a list of segments in an M3U8 playlist.
+Inherits from list and GroupedBasePathMixin for managing base paths across a group of items.
+"""
+
+@property
+def uri(self):
+"""
+Get the URI of each segment in the SegmentList.
+
+Returns:
+- List of URIs of segments in the SegmentList.
+"""
+return [seg.uri for seg in self]
+
+
+class Key(BasePathMixin):
+"""
+Class representing a key used for encryption in an M3U8 playlist.
+Inherits from BasePathMixin for managing base paths.
+"""
+
+def __init__(self, method, uri, base_uri, iv=None):
+"""
+Initialize a Key object.
+
+Args:
+- method: Encryption method.
+ex: "AES-128"
+- uri: URI of the key.
+ex: "https://priv.example.com/key.php?r=52"
+- base_uri: Base URI for the key.
+ex: http://example.com/path/to
+- iv: Initialization vector (optional).
+ex: 0X12A
+"""
+self.method = method
+self.uri = uri
+self.iv = iv
+self.base_uri = base_uri
+
+
+class Playlist(BasePathMixin):
+"""
+Playlist object representing a link to a variant M3U8 with a specific bitrate.
+
+More info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.10
+"""
+
+def __init__(self, uri, stream_info, media, base_uri):
+"""
+Initialize a Playlist object.
+
+Args:
+- uri: URI of the playlist.
+- stream_info: is a named tuple containing the attributes: `program_id`,
+- media: List of Media objects associated with the playlist.
+- base_uri: Base URI for the playlist.
+"""
+self.uri = uri
+self.base_uri = base_uri
+
+# Extract resolution information from stream_info
+resolution = stream_info.get('resolution')
+if resolution is not None:
+values = resolution.split('x')
+resolution_pair = (int(values[0]), int(values[1]))
+else:
+resolution_pair = None
+
+# Create StreamInfo object
+self.stream_info = StreamInfo(
+bandwidth = stream_info['bandwidth'],
+program_id = stream_info.get('program_id'),
+resolution = resolution_pair,
+codecs = stream_info.get('codecs')
+)
+
+# Filter media based on group ID and media type
+self.media = []
+for media_type in ('audio', 'video', 'subtitles'):
+group_id = stream_info.get(media_type)
+if group_id:
+self.media += filter(lambda m: m.group_id == group_id, media)
+
+
+class IFramePlaylist(BasePathMixin):
+"""
+Class representing an I-Frame playlist in an M3U8 playlist.
+Inherits from BasePathMixin for managing base paths.
+"""
+
+def __init__(self, base_uri, uri, iframe_stream_info):
+"""
+Initialize an IFramePlaylist object.
+
+Args:
+- base_uri: Base URI for the I-Frame playlist.
+- uri: URI of the I-Frame playlist.
+- iframe_stream_info, is a named tuple containing the attributes:
+`program_id`, `bandwidth`, `codecs` and `resolution` which is a tuple (w, h) of integers
+"""
+self.uri = uri
+self.base_uri = base_uri
+
+# Extract resolution information from iframe_stream_info
+resolution = iframe_stream_info.get('resolution')
+if resolution is not None:
+values = resolution.split('x')
+resolution_pair = (int(values[0]), int(values[1]))
+else:
+resolution_pair = None
+
+# Create StreamInfo object for I-Frame playlist
+self.iframe_stream_info = StreamInfo(
+bandwidth = iframe_stream_info.get('bandwidth'),
+program_id = iframe_stream_info.get('program_id'),
+resolution = resolution_pair,
+codecs = iframe_stream_info.get('codecs')
+)
+
+class PlaylistList(list, GroupedBasePathMixin):
+"""
+Class representing a list of playlists in an M3U8 playlist.
+Inherits from list and GroupedBasePathMixin for managing base paths across a group of items.
+"""
+
+def __str__(self):
+"""
+Return a string representation of the PlaylistList.
+
+Returns:
+- String representation of the PlaylistList.
+"""
+output = [str(playlist) for playlist in self]
+return '\n'.join(output)
338
Src/Lib/M3U8/parser/parser.py
Normal file
338
Src/Lib/M3U8/parser/parser.py
Normal file
@ -0,0 +1,338 @@
|
|||||||
|
# 15.04.24
|
||||||
|
|
||||||
|
import re
|
||||||
|
import logging
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
# Internal utilities
|
||||||
|
from ..parser import protocol
|
||||||
|
from ._util import (
|
||||||
|
remove_quotes,
|
||||||
|
remove_quotes_parser,
|
||||||
|
normalize_attribute
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# External utilities
|
||||||
|
from Src.Util._jsonConfig import config_manager
|
||||||
|
|
||||||
|
|
||||||
|
# Variable
|
||||||
|
REMOVE_EMPTY_ROW = config_manager.get_bool('M3U8_PARSER', 'skip_empty_row_playlist')
|
||||||
|
ATTRIBUTELISTPATTERN = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
|
||||||
|
|
||||||
|
|
||||||
|
def parse(content):
    """
    Given an M3U8 playlist content, parses the content and extracts metadata.

    Args:
        content (str): The M3U8 playlist content.

    Returns:
        dict: A dictionary containing the parsed metadata.
    """

    # Initialize data dictionary with default values
    data = {
        'is_variant': False,
        'is_endlist': False,
        'is_i_frames_only': False,
        'playlist_type': None,
        'playlists': [],
        'iframe_playlists': [],
        'segments': [],
        'media': [],
    }

    # Initialize state dictionary for tracking parsing state
    state = {
        'expect_segment': False,
        'expect_playlist': False,
    }

    # Iterate over lines in the content
    content = content.split("\n")
    content_length = len(content)
    i = 0

    while i < content_length:
        line = content[i]
        line_stripped = line.strip()
        is_end = i + 1 == content_length - 2

        if REMOVE_EMPTY_ROW:
            if i < content_length - 2:
                actual_row = extract_params(line_stripped)
                next_row = extract_params(content[i + 2].strip())

                if actual_row is not None and next_row is None and not is_end:
                    logging.info(f"Skip row: {line_stripped}")
                    i += 1
                    continue

        i += 1

        if line.startswith(protocol.ext_x_byterange):
            _parse_byterange(line, state)
            state['expect_segment'] = True

        elif state['expect_segment']:
            _parse_ts_chunk(line, data, state)
            state['expect_segment'] = False

        elif state['expect_playlist']:
            _parse_variant_playlist(line, data, state)
            state['expect_playlist'] = False

        elif line.startswith(protocol.ext_x_targetduration):
            _parse_simple_parameter(line, data, float)
        elif line.startswith(protocol.ext_x_media_sequence):
            _parse_simple_parameter(line, data, int)
        elif line.startswith(protocol.ext_x_discontinuity):
            state['discontinuity'] = True
        elif line.startswith(protocol.ext_x_version):
            _parse_simple_parameter(line, data)
        elif line.startswith(protocol.ext_x_allow_cache):
            _parse_simple_parameter(line, data)

        elif line.startswith(protocol.ext_x_key):
            state['current_key'] = _parse_key(line)
            data['key'] = data.get('key', state['current_key'])

        elif line.startswith(protocol.extinf):
            _parse_extinf(line, data, state)
            state['expect_segment'] = True

        elif line.startswith(protocol.ext_x_stream_inf):
            state['expect_playlist'] = True
            _parse_stream_inf(line, data, state)

        elif line.startswith(protocol.ext_x_i_frame_stream_inf):
            _parse_i_frame_stream_inf(line, data)

        elif line.startswith(protocol.ext_x_media):
            _parse_media(line, data, state)

        elif line.startswith(protocol.ext_x_playlist_type):
            _parse_simple_parameter(line, data)

        elif line.startswith(protocol.ext_i_frames_only):
            data['is_i_frames_only'] = True

        elif line.startswith(protocol.ext_x_endlist):
            data['is_endlist'] = True

    return data
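With this module imported (and assuming the _util helpers behave as their names suggest), calling parse() on a small playlist looks roughly like this; the manifest is hypothetical and the keys match the data dict initialized above:

manifest = (
    '#EXTM3U\n'
    '#EXT-X-VERSION:3\n'
    '#EXT-X-TARGETDURATION:10\n'
    '#EXTINF:9.8,\n'
    'seg_000.ts\n'
    '#EXTINF:9.8,\n'
    'seg_001.ts\n'
    '#EXT-X-ENDLIST\n'
)

result = parse(manifest)
print(result['is_endlist'])                     # -> True
print([s['uri'] for s in result['segments']])   # -> ['seg_000.ts', 'seg_001.ts']
print(result['targetduration'])                 # -> 10.0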
def extract_params(line):
    """
    Extracts parameters from a formatted input string.

    Args:
        - line (str): The string containing the parameters to extract.

    Returns:
        dict or None: A dictionary containing the extracted parameters with their respective values.
    """
    params = {}
    matches = re.findall(r'([A-Z\-]+)=("[^"]*"|[^",\s]*)', line)
    if not matches:
        return None
    for match in matches:
        param, value = match
        params[param] = value.strip('"')
    return params
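For example (made-up lines, not part of the commit): attribute-style rows yield a dict, while a plain segment URI yields None, which is exactly what the REMOVE_EMPTY_ROW branch in parse() keys on:

print(extract_params('#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="Italian"'))
# -> {'TYPE': 'AUDIO', 'GROUP-ID': 'audio', 'NAME': 'Italian'}

print(extract_params('seg_000.ts'))
# -> None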
def _parse_key(line):
    """
    Parses the #EXT-X-KEY line and extracts key attributes.

    Args:
        - line (str): The #EXT-X-KEY line from the playlist.

    Returns:
        dict: A dictionary containing the key attributes.
    """
    params = ATTRIBUTELISTPATTERN.split(line.replace(protocol.ext_x_key + ':', ''))[1::2]
    key = {}
    for param in params:
        name, value = param.split('=', 1)
        key[normalize_attribute(name)] = remove_quotes(value)
    return key
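Fed a typical encryption line (hypothetical URI), and assuming the _util helpers lowercase attribute names and strip quotes as their names suggest, _parse_key would return something like:

line = '#EXT-X-KEY:METHOD=AES-128,URI="https://example.org/key.bin",IV=0x00000000000000000000000000001234'
print(_parse_key(line))
# -> {'method': 'AES-128', 'uri': 'https://example.org/key.bin', 'iv': '0x00000000000000000000000000001234'}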
def _parse_extinf(line, data, state):
    """
    Parses the #EXTINF line and extracts segment duration and title.

    Args:
        - line (str): The #EXTINF line from the playlist.
        - data (dict): The dictionary to store the parsed data.
        - state (dict): The parsing state.
    """
    duration, title = line.replace(protocol.extinf + ':', '').split(',')
    state['segment'] = {'duration': float(duration), 'title': remove_quotes(title)}


def _parse_ts_chunk(line, data, state):
    """
    Parses a segment URI line and adds it to the segment list.

    Args:
        line (str): The segment URI line from the playlist.
        data (dict): The dictionary to store the parsed data.
        state (dict): The parsing state.
    """
    segment = state.pop('segment')
    if state.get('current_program_date_time'):
        segment['program_date_time'] = state['current_program_date_time']
        state['current_program_date_time'] += datetime.timedelta(seconds=segment['duration'])
    segment['uri'] = line
    segment['discontinuity'] = state.pop('discontinuity', False)
    if state.get('current_key'):
        segment['key'] = state['current_key']
    data['segments'].append(segment)
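Taken together, an #EXTINF line followed by its URI flows through these two helpers roughly like this (hypothetical input, not part of the commit):

data = {'segments': []}
state = {}

_parse_extinf('#EXTINF:9.8,opening scene', data, state)
_parse_ts_chunk('seg_000.ts', data, state)

print(data['segments'])
# -> [{'duration': 9.8, 'title': 'opening scene', 'uri': 'seg_000.ts', 'discontinuity': False}]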
def _parse_attribute_list(prefix, line, atribute_parser):
    """
    Parses a line containing a list of attributes and their values.

    Args:
        - prefix (str): The prefix to identify the line.
        - line (str): The line containing the attributes.
        - atribute_parser (dict): A dictionary mapping attribute names to parsing functions.

    Returns:
        dict: A dictionary containing the parsed attributes.
    """
    params = ATTRIBUTELISTPATTERN.split(line.replace(prefix + ':', ''))[1::2]

    attributes = {}
    for param in params:
        name, value = param.split('=', 1)
        name = normalize_attribute(name)

        if name in atribute_parser:
            value = atribute_parser[name](value)

        attributes[name] = value

    return attributes
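A sketch of how the callers below use it (assuming remove_quotes_parser returns a mapping of attribute name to an unquoting function, as its usage here implies):

atribute_parser = remove_quotes_parser('codecs')
atribute_parser['bandwidth'] = int

attrs = _parse_attribute_list(
    '#EXT-X-STREAM-INF',
    '#EXT-X-STREAM-INF:BANDWIDTH=1280000,CODECS="avc1.4d401f,mp4a.40.2",RESOLUTION=1280x720',
    atribute_parser
)
print(attrs)
# -> {'bandwidth': 1280000, 'codecs': 'avc1.4d401f,mp4a.40.2', 'resolution': '1280x720'}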
def _parse_stream_inf(line, data, state):
    """
    Parses the #EXT-X-STREAM-INF line and extracts stream information.

    Args:
        - line (str): The #EXT-X-STREAM-INF line from the playlist.
        - data (dict): The dictionary to store the parsed data.
        - state (dict): The parsing state.
    """
    data['is_variant'] = True
    atribute_parser = remove_quotes_parser('codecs', 'audio', 'video', 'subtitles')
    atribute_parser["program_id"] = int
    atribute_parser["bandwidth"] = int
    state['stream_info'] = _parse_attribute_list(protocol.ext_x_stream_inf, line, atribute_parser)


def _parse_i_frame_stream_inf(line, data):
    """
    Parses the #EXT-X-I-FRAME-STREAM-INF line and extracts I-frame stream information.

    Args:
        - line (str): The #EXT-X-I-FRAME-STREAM-INF line from the playlist.
        - data (dict): The dictionary to store the parsed data.
    """
    atribute_parser = remove_quotes_parser('codecs', 'uri')
    atribute_parser["program_id"] = int
    atribute_parser["bandwidth"] = int
    iframe_stream_info = _parse_attribute_list(protocol.ext_x_i_frame_stream_inf, line, atribute_parser)
    iframe_playlist = {'uri': iframe_stream_info.pop('uri'),
                       'iframe_stream_info': iframe_stream_info}

    data['iframe_playlists'].append(iframe_playlist)
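In parse() above, #EXT-X-STREAM-INF arms expect_playlist, and the URI on the following line is attached by _parse_variant_playlist (defined below). Sketched end to end with hypothetical values:

data = {'playlists': [], 'is_variant': False}
state = {}

_parse_stream_inf('#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2560000,RESOLUTION=1920x1080', data, state)
_parse_variant_playlist('1080p/index.m3u8', data, state)

print(data['playlists'][0]['uri'])                       # -> 1080p/index.m3u8
print(data['playlists'][0]['stream_info']['bandwidth'])  # -> 2560000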
def _parse_media(line, data, state):
    """
    Parses the #EXT-X-MEDIA line and extracts media attributes.

    Args:
        - line (str): The #EXT-X-MEDIA line from the playlist.
        - data (dict): The dictionary to store the parsed data.
        - state (dict): The parsing state.
    """
    quoted = remove_quotes_parser('uri', 'group_id', 'language', 'name', 'characteristics')
    media = _parse_attribute_list(protocol.ext_x_media, line, quoted)
    data['media'].append(media)


def _parse_variant_playlist(line, data, state):
    """
    Parses a variant playlist line and extracts playlist information.

    Args:
        - line (str): The variant playlist line from the playlist.
        - data (dict): The dictionary to store the parsed data.
        - state (dict): The parsing state.
    """
    playlist = {'uri': line, 'stream_info': state.pop('stream_info')}

    data['playlists'].append(playlist)


def _parse_byterange(line, state):
    """
    Parses the #EXT-X-BYTERANGE line and extracts byte range information.

    Args:
        - line (str): The #EXT-X-BYTERANGE line from the playlist.
        - state (dict): The parsing state.
    """
    state['segment']['byterange'] = line.replace(protocol.ext_x_byterange + ':', '')
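Note that the byterange is stored as the raw "length@offset" string rather than parsed into integers; a quick sketch with the module's helpers in scope:

state = {'segment': {'duration': 4.0, 'title': ''}}
_parse_byterange('#EXT-X-BYTERANGE:75232@0', state)
print(state['segment']['byterange'])   # -> 75232@0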
def _parse_simple_parameter_raw_value(line, cast_to=str, normalize=False):
    """
    Parses a line containing a simple parameter and its value.

    Args:
        - line (str): The line containing the parameter and its value.
        - cast_to (type): The type to which the value should be cast.
        - normalize (bool): Whether to also normalize the parsed value (the parameter name is always normalized).

    Returns:
        tuple: A tuple containing the parameter name and its value.
    """
    param, value = line.split(':', 1)
    param = normalize_attribute(param.replace('#EXT-X-', ''))
    if normalize:
        value = normalize_attribute(value)
    return param, cast_to(value)


def _parse_and_set_simple_parameter_raw_value(line, data, cast_to=str, normalize=False):
    """
    Parses a line containing a simple parameter and its value, and sets it in the data dictionary.

    Args:
        - line (str): The line containing the parameter and its value.
        - data (dict): The dictionary to store the parsed data.
        - cast_to (type): The type to which the value should be cast.
        - normalize (bool): Whether to also normalize the parsed value.

    Returns:
        The parsed value.
    """
    param, value = _parse_simple_parameter_raw_value(line, cast_to, normalize)
    data[param] = value
    return data[param]


def _parse_simple_parameter(line, data, cast_to=str):
    """
    Parses a line containing a simple parameter and its value, and sets it in the data dictionary.

    Args:
        line (str): The line containing the parameter and its value.
        data (dict): The dictionary to store the parsed data.
        cast_to (type): The type to which the value should be cast.

    Returns:
        The parsed value.
    """
    return _parse_and_set_simple_parameter_raw_value(line, data, cast_to, True)
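End to end, these helpers turn a simple header line into a typed entry in data (again assuming normalize_attribute lowercases names):

data = {}
_parse_simple_parameter('#EXT-X-TARGETDURATION:10', data, float)
print(data)   # -> {'targetduration': 10.0}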
17  Src/Lib/M3U8/parser/protocol.py  (new file)
@ -0,0 +1,17 @@
# 15.04.24

ext_x_targetduration = '#EXT-X-TARGETDURATION'
ext_x_media_sequence = '#EXT-X-MEDIA-SEQUENCE'
ext_x_program_date_time = '#EXT-X-PROGRAM-DATE-TIME'
ext_x_media = '#EXT-X-MEDIA'
ext_x_playlist_type = '#EXT-X-PLAYLIST-TYPE'
ext_x_key = '#EXT-X-KEY'
ext_x_stream_inf = '#EXT-X-STREAM-INF'
ext_x_version = '#EXT-X-VERSION'
ext_x_allow_cache = '#EXT-X-ALLOW-CACHE'
ext_x_endlist = '#EXT-X-ENDLIST'
extinf = '#EXTINF'
ext_i_frames_only = '#EXT-X-I-FRAMES-ONLY'
ext_x_byterange = '#EXT-X-BYTERANGE'
ext_x_i_frame_stream_inf = '#EXT-X-I-FRAME-STREAM-INF'
ext_x_discontinuity = '#EXT-X-DISCONTINUITY'
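One subtlety: dispatch in parse() uses str.startswith, and some of these tags are prefixes of others, so branch order matters. A quick check (illustrative, not part of the commit):

print('#EXT-X-MEDIA-SEQUENCE:0'.startswith(ext_x_media))           # -> True (prefix collision)
print('#EXT-X-MEDIA-SEQUENCE:0'.startswith(ext_x_media_sequence))  # -> True
# parse() tests ext_x_media_sequence before ext_x_media, which resolves the ambiguity.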
@ -10,7 +10,7 @@ from Src.Util.console import console


 # External library
-import httpx
+import requests


 # Variable
@ -28,10 +28,9 @@ def update():

     # Make the GitHub API requests and handle potential errors
     try:
-        response_reposity = httpx.get(f"https://api.github.com/repos/{repo_user}/{repo_name}").json()
-        response_releases = httpx.get(f"https://api.github.com/repos/{repo_user}/{repo_name}/releases").json()
-
-    except Exception as e:
+        response_reposity = requests.get(f"https://api.github.com/repos/{repo_user}/{repo_name}").json()
+        response_releases = requests.get(f"https://api.github.com/repos/{repo_user}/{repo_name}/releases").json()
+    except requests.RequestException as e:
         console.print(f"[red]Error accessing GitHub API: {e}")
         return
@ -1,6 +1,6 @@
 # 29.04.24

-import httpx
+import requests
 import json
 from bs4 import BeautifulSoup

@ -22,10 +22,10 @@ preference_registry = ['Verisign', 'KSregistry', 'KNET']
 def scrape_new_gtld_applications(url):

     # Send a GET request to the URL
-    response = httpx.get(url)
+    response = requests.get(url)

     # Check if the response is successful (response.ok accepts any status below 400)
-    if response.status_code == 200:
+    if response.ok:

         # Parse the HTML content of the page
         soup = BeautifulSoup(response.content, 'html.parser')
11  config.json
@ -13,7 +13,8 @@
     "timeout": 5,
     "verify_ssl": false,
     "index": {"user-agent": ""},
-    "segments": {"user-agent": ""}
+    "segments": {"user-agent": ""},
+    "proxy": []
 },
 "M3U8_DOWNLOAD": {
     "tdqm_workers": 2,
@ -31,10 +32,10 @@
 },
 "M3U8_CONVERSION": {
     "use_codec": false,
     "use_vcodec": true,
     "use_acodec": true,
     "use_bitrate": true,
-    "use_gpu": false,
+    "use_gpu": true,
     "default_preset": "ultrafast",
     "check_output_after_ffmpeg": false
 },
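The new REQUESTS.proxy list presumably holds full proxy URLs. A sketch of how such an entry might be consumed with requests; the URL and the selection logic here are illustrative, not the project's actual code:

import random
import requests

proxy_list = ["http://user:pass@127.0.0.1:8080"]   # hypothetical entry for the "proxy" list

# Pick one proxy at random and route both schemes through it
proxy = random.choice(proxy_list)
response = requests.get(
    "https://example.org",
    proxies={"http": proxy, "https": proxy},
    timeout=5,
)
print(response.status_code)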
@ -1,7 +1,6 @@
-httpx
+requests
 bs4
 rich
 tqdm
-m3u8
 unidecode
 fake-useragent
4  run.py
@ -68,10 +68,10 @@ def initialize():
     sys.exit(0)

     # Attempting GitHub update
-    """try:
+    try:
         git_update()
     except Exception as e:
-        console.print(f"[blue]Req github [white]=> [red]Failed: {e}")"""
+        console.print(f"[blue]Req github [white]=> [red]Failed: {e}")


 def main():