* Update build.yml

* Update site.py

* Update requirements.txt

* Update os.py

* Update run.py

* Update global_search.py

* Update hdplayer.py

* Create index.html

* Create script.js

* Create style.css

* Create pages.yml

* Some fix
Commit faf83765d0 (parent 32197a3c5d), committed by GitHub on 2025-05-10 09:17:37 +02:00.
13 changed files with 647 additions and 45 deletions.

.github/.site/css/style.css (new file)

@@ -0,0 +1,329 @@
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
:root {
--primary-color: #8c52ff;
--secondary-color: #6930c3;
--accent-color: #00e5ff;
--background-color: #121212;
--card-background: #1e1e1e;
--text-color: #f8f9fa;
--shadow-color: rgba(0, 0, 0, 0.25);
--card-hover: #2a2a2a;
--border-color: #333333;
--header-bg: rgba(18, 18, 18, 0.95);
}
* {
margin: 0;
padding: 0;
box-sizing: border-box;
transition: all 0.2s ease;
}
body {
font-family: 'Inter', 'Segoe UI', sans-serif;
background-color: var(--background-color);
color: var(--text-color);
line-height: 1.6;
min-height: 100vh;
display: flex;
flex-direction: column;
}
header {
background-color: var(--header-bg);
backdrop-filter: blur(10px);
position: fixed;
width: 100%;
padding: 15px 0;
z-index: 1000;
box-shadow: 0 2px 12px var(--shadow-color);
}
.container {
max-width: 1400px;
margin: 0 auto;
padding: 20px;
flex: 1;
}
.site-grid {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
gap: 24px;
padding: 2rem 0;
}
.site-item {
min-height: 280px;
background-color: var(--card-background);
border-radius: 16px;
padding: 30px;
box-shadow: 0 6px 20px var(--shadow-color);
transition: transform 0.3s ease, box-shadow 0.3s ease;
display: flex;
flex-direction: column;
align-items: center;
border: 1px solid var(--border-color);
position: relative;
overflow: hidden;
}
.site-item::before {
content: '';
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 4px;
background: linear-gradient(90deg, var(--primary-color), var(--accent-color));
}
.site-item:hover {
transform: translateY(-5px);
box-shadow: 0 12px 30px var(--shadow-color);
}
.site-item img {
width: 80px;
height: 80px;
margin-bottom: 1.5rem;
border-radius: 16px;
object-fit: cover;
border: 2px solid var(--border-color);
}
.site-content {
text-align: center;
width: 100%;
}
.site-item h3 {
font-size: 1.4rem;
font-weight: 600;
margin-bottom: 0.5rem;
color: var(--primary-color);
}
.domain {
color: var(--text-color);
opacity: 0.8;
font-size: 0.9rem;
margin-bottom: 1.5rem;
word-break: break-all;
}
.site-item a {
margin-top: 1rem;
background: linear-gradient(135deg, var(--primary-color), var(--secondary-color));
color: white;
text-decoration: none;
font-weight: 500;
padding: 12px 28px;
border-radius: 8px;
width: fit-content;
margin: 0 auto;
display: flex;
align-items: center;
gap: 8px;
}
.site-item a:hover {
opacity: 0.9;
transform: translateY(-2px);
}
footer {
background: var(--card-background);
border-top: 1px solid var(--border-color);
margin-top: auto;
padding: 40px 20px;
position: relative;
}
.footer-content {
max-width: 1200px;
margin: 0 auto;
display: grid;
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
gap: 30px;
position: relative;
padding: 10px 0;
}
.footer-section {
padding: 20px;
border-radius: 12px;
transition: transform 0.3s ease, background-color 0.3s ease;
background-color: var(--card-background);
border: 1px solid var(--border-color);
}
.footer-section:hover {
transform: translateY(-5px);
background-color: var(--card-hover);
}
.footer-title {
color: var(--accent-color);
font-size: 1.3rem;
margin-bottom: 1.5rem;
padding-bottom: 0.5rem;
position: relative;
letter-spacing: 0.5px;
}
.footer-title::after {
content: '';
position: absolute;
bottom: 0;
left: 0;
width: 60px;
height: 3px;
border-radius: 2px;
background: linear-gradient(90deg, var(--primary-color), var(--accent-color));
}
.footer-links {
list-style: none;
}
.footer-links li {
margin-bottom: 0.8rem;
}
.footer-links a {
color: var(--text-color);
text-decoration: none;
display: flex;
align-items: center;
gap: 8px;
opacity: 0.8;
transition: all 0.3s ease;
padding: 8px 12px;
border-radius: 8px;
background-color: transparent;
}
.footer-links a:hover {
opacity: 1;
color: var(--accent-color);
transform: translateX(8px);
background-color: rgba(140, 82, 255, 0.1);
}
.footer-links i {
width: 20px;
text-align: center;
font-size: 1.2rem;
color: var(--primary-color);
transition: transform 0.3s ease;
}
.footer-links a:hover i {
transform: scale(1.2);
}
.github-stats {
display: flex;
gap: 10px;
margin-top: 10px;
font-size: 0.8rem;
}
.github-badge {
background-color: var(--background-color);
padding: 4px 8px;
border-radius: 4px;
display: flex;
align-items: center;
gap: 4px;
}
.github-badge i {
color: var(--accent-color);
}
.footer-description {
margin-top: 15px;
font-size: 0.9rem;
color: var(--text-color);
opacity: 0.8;
line-height: 1.5;
}
.update-info {
text-align: center;
margin-top: 30px;
padding-top: 30px;
border-top: 1px solid var(--border-color);
}
.update-note {
color: var(--accent-color);
font-size: 0.9rem;
opacity: 0.9;
}
@media (max-width: 768px) {
.footer-content {
grid-template-columns: 1fr;
text-align: center;
}
.footer-title::after {
left: 50%;
transform: translateX(-50%);
}
.footer-links a {
justify-content: center;
}
.footer-links a:hover {
transform: translateY(-5px);
}
.footer-section {
margin-bottom: 20px;
}
}
.loader {
border: 3px solid var(--border-color);
border-top: 3px solid var(--primary-color);
border-right: 3px solid var(--accent-color);
border-radius: 50%;
width: 50px;
height: 50px;
animation: spin 1s linear infinite;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
@media (max-width: 768px) {
.site-item {
padding: 25px;
}
.site-item img {
width: 70px;
height: 70px;
}
}
.old-domain, .time-change {
color: var(--text-color);
opacity: 0.7;
font-size: 0.85rem;
margin-bottom: 0.5rem;
word-break: break-all;
}
.label {
color: var(--accent-color);
font-weight: 500;
}

.github/.site/index.html (new file)

@@ -0,0 +1,74 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Streaming Directory</title>
<link rel="stylesheet" href="css/style.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css">
</head>
<body>
<main>
<section class="container">
<div class="sites-container">
<div id="site-list" class="site-grid">
<div class="loader"></div>
</div>
</div>
</section>
</main>
<footer>
<div class="footer-content">
<div class="footer-section">
<h3 class="footer-title">Repository</h3>
<ul class="footer-links">
<li>
<a href="https://github.com/Arrowar/StreamingCommunity" target="_blank" rel="noopener noreferrer">
<i class="fab fa-github"></i>
Project GitHub
</a>
</li>
</ul>
<p class="footer-description">
An updated collection of streaming sites. Contribute to the project on GitHub!
</p>
</div>
<div class="footer-section">
<h3 class="footer-title">Author</h3>
<ul class="footer-links">
<li>
<a href="https://github.com/Arrowar" target="_blank" rel="noopener noreferrer">
<i class="fas fa-user-tie"></i>
Arrowar Profile
</a>
</li>
</ul>
<p class="footer-description">
Developer of the project.
</p>
</div>
<div class="footer-section">
<h3 class="footer-title">Info</h3>
<ul class="footer-links">
<li>
<span class="update-note">
<i class="fas fa-sync-alt"></i>
Domains updated once every hour
</span>
</li>
</ul>
<p class="footer-description">
All domains are automatically updated once every hour.
</p>
</div>
</div>
</footer>
<script src="js/script.js"></script>
</body>
</html>

.github/.site/js/script.js (new file)

@@ -0,0 +1,88 @@
const supabaseUrl = 'https://zvfngpoxwrgswnzytadh.supabase.co';
const supabaseKey = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Inp2Zm5ncG94d3Jnc3duenl0YWRoIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDAxNTIxNjMsImV4cCI6MjA1NTcyODE2M30.FNTCCMwi0QaKjOu8gtZsT5yQttUW8QiDDGXmzkn89QE';
async function loadSiteData() {
const siteList = document.getElementById('site-list');
try {
const headers = {
'apikey': supabaseKey,
'Authorization': `Bearer ${supabaseKey}`,
'Content-Type': 'application/json'
};
const response = await fetch(`${supabaseUrl}/rest/v1/public`, {
method: 'GET',
headers: headers
});
if (!response.ok) throw new Error(`HTTP error! Status: ${response.status}`);
const data = await response.json();
siteList.innerHTML = '';
if (data && data.length > 0) {
const configSite = data[0].data;
for (const siteName in configSite) {
const site = configSite[siteName];
const siteItem = document.createElement('div');
siteItem.className = 'site-item';
const siteIcon = document.createElement('img');
siteIcon.src = `https://t2.gstatic.com/faviconV2?client=SOCIAL&type=FAVICON&fallback_opts=TYPE,SIZE,URL&url=${site.full_url}&size=128`;
siteIcon.alt = `${siteName} icon`;
siteIcon.onerror = function() {
this.src = 'data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 24 24" fill="none" stroke="%238c52ff" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M21 12.79A9 9 0 1 1 11.21 3 7 7 0 0 0 21 12.79z"></path></svg>';
};
const siteContent = document.createElement('div');
siteContent.className = 'site-content';
const siteTitle = document.createElement('h3');
siteTitle.textContent = siteName;
if (site.old_domain) {
const oldDomain = document.createElement('p');
oldDomain.className = 'old-domain';
oldDomain.innerHTML = `<span class="label">Previous domain:</span> ${site.old_domain.replace(/^https?:\/\//, '')}`;
siteContent.appendChild(oldDomain);
}
if (site.time_change) {
const timeChange = document.createElement('p');
timeChange.className = 'time-change';
const changeDate = new Date(site.time_change);
const dateString = isNaN(changeDate) ? site.time_change : changeDate.toLocaleDateString();
timeChange.innerHTML = `<span class="label">Updated:</span> ${dateString}`;
siteContent.appendChild(timeChange);
}
const siteLink = document.createElement('a');
siteLink.href = site.full_url;
siteLink.target = '_blank';
siteLink.innerHTML = 'Visit <i class="fas fa-external-link-alt"></i>';
siteLink.rel = 'noopener noreferrer';
siteContent.appendChild(siteTitle);
siteContent.appendChild(siteLink);
siteItem.appendChild(siteIcon);
siteItem.appendChild(siteContent);
siteList.appendChild(siteItem);
}
} else {
siteList.innerHTML = '<div class="no-sites">No sites available</div>';
}
} catch (error) {
console.error('Error:', error);
siteList.innerHTML = `
<div class="error-message">
<p>Failed to load sites</p>
<button onclick="loadSiteData()" class="retry-button">Retry</button>
</div>
`;
}
}
document.addEventListener('DOMContentLoaded', loadSiteData);

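For reference, loadSiteData() above assumes the Supabase "public" table returns rows whose "data" column maps each site name to an object with at least full_url, plus optional old_domain and time_change. A minimal Python sketch of that same request, under that assumption (the anon key is the one hard-coded in script.js; the fetch_sites helper name is illustrative, not part of this commit):

# Illustrative companion sketch (not part of the commit): query the same
# Supabase REST endpoint that script.js uses and print the site entries it
# expects, i.e. rows whose "data" column maps a site name to an object with
# at least "full_url" and, optionally, "old_domain" and "time_change".
import httpx

SUPABASE_URL = "https://zvfngpoxwrgswnzytadh.supabase.co"
SUPABASE_KEY = "<paste the anon key from script.js above>"

def fetch_sites():
    headers = {
        "apikey": SUPABASE_KEY,
        "Authorization": f"Bearer {SUPABASE_KEY}",
        "Content-Type": "application/json",
    }
    response = httpx.get(f"{SUPABASE_URL}/rest/v1/public", headers=headers, timeout=15)
    response.raise_for_status()
    rows = response.json()
    if not rows:
        return {}
    # Same shape the front-end reads: data[0].data
    return rows[0]["data"]

if __name__ == "__main__":
    for name, site in fetch_sites().items():
        print(name, site.get("full_url"), site.get("old_domain"), site.get("time_change"))
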
.github/workflows/build.yml

@@ -98,6 +98,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
+          python -m pip install --upgrade certifi
           python -m pip install -r requirements.txt
           python -m pip install pyinstaller
@@ -137,4 +138,4 @@ jobs:
           tag_name: ${{ env.latest_tag }}
           files: dist/${{ matrix.executable }}
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/pages.yml (new file)

@@ -0,0 +1,45 @@
on:
  push:
    branches: ["main"]
  workflow_dispatch:

permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Pages
        uses: actions/configure-pages@v5

      - name: Copy site files
        run: |
          mkdir -p _site
          cp -r .site/* _site/
          ls -la _site/

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: _site

  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4

hdplayer.py

@@ -2,14 +2,13 @@
 import re

 # External library
 import httpx
 from bs4 import BeautifulSoup

 # Internal utilities
-from StreamingCommunity.Util.headers import get_headers
+from StreamingCommunity.Util.headers import get_userAgent
 from StreamingCommunity.Util.config_json import config_manager
@@ -19,7 +18,7 @@ MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")

 class VideoSource:
     def __init__(self, proxy=None):
-        self.client = httpx.Client(headers=get_headers(), timeout=MAX_TIMEOUT, proxy=proxy)
+        self.client = httpx.Client(headers={'user-agent': get_userAgent()}, timeout=MAX_TIMEOUT, proxy=proxy)

     def extractLinkHdPlayer(self, response):
         """Extract iframe source from the page."""
@@ -34,6 +33,9 @@ class VideoSource:
         Extract m3u8 URL from hdPlayer page.
         """
         try:
+            base_domain = re.match(r'https?://(?:www\.)?([^/]+)', page_url).group(0)
+            self.client.headers.update({'referer': base_domain})
+
             # Get the page content
             response = self.client.get(page_url)
@@ -41,19 +43,17 @@
             iframe_url = self.extractLinkHdPlayer(response)
             if not iframe_url:
                 return None

             # Get HDPlayer page content
             response_hdplayer = self.client.get(iframe_url)
             if response_hdplayer.status_code != 200:
                 return None

-            soup = BeautifulSoup(response_hdplayer.text, 'html.parser')
-
-            # Find m3u8 URL in scripts
-            for script in soup.find_all("script"):
-                match = re.search(r'sources:\s*\[\{\s*file:\s*"([^"]+)"', script.text)
-                if match:
-                    return match.group(1)
+            sources_pattern = r'file:"([^"]+)"'
+            match = re.search(sources_pattern, response_hdplayer.text)
+            if match:
+                return match.group(1)

             return None
@@ -62,4 +62,4 @@ class VideoSource:
             return None

         finally:
-            self.client.close()
+            self.client.close()

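The hunk above drops the BeautifulSoup scan over every script tag in favour of one regex applied to the raw HDPlayer response. A small sketch of how that pattern behaves in isolation (only the regex comes from the diff; the sample HTML and URL below are invented for demonstration):

# Illustrative only: apply the file:"..." pattern introduced by the commit
# to a made-up player snippet and print the captured m3u8 URL.
import re

sample_player_html = '''
<script>
    jwplayer("player").setup({
        sources: [{ file:"https://example.com/stream/playlist.m3u8" }]
    });
</script>
'''

sources_pattern = r'file:"([^"]+)"'   # same pattern as in the diff
match = re.search(sources_pattern, sample_player_html)
print(match.group(1) if match else None)   # -> https://example.com/stream/playlist.m3u8
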
site.py

@@ -31,7 +31,11 @@ def get_session_and_csrf() -> dict:
     Get the session ID and CSRF token from the website's cookies and HTML meta data.
     """
     # Send an initial GET request to the website
-    response = httpx.get(site_constant.FULL_URL, headers=get_headers())
+    response = httpx.get(
+        site_constant.FULL_URL,
+        headers=get_headers(),
+        verify=False
+    )

     # Extract the sessionId from the cookies
     session_id = response.cookies.get('sessionId')
@@ -114,4 +118,4 @@ def title_search(query: str) -> int:
             print(f"Error parsing a film entry: {e}")

     # Return the length of media search manager
-    return media_search_manager.get_length()
+    return media_search_manager.get_length()

@@ -103,7 +103,7 @@ class ProxyFinder:
         return proxies

     def fetch_proxies_from_sources(self) -> list:
-        print("[cyan]Fetching proxies from sources...[/cyan]")
+        #print("[cyan]Fetching proxies from sources...[/cyan]")
         with ThreadPoolExecutor(max_workers=3) as executor:
             proxyscrape_future = executor.submit(self.fetch_proxyscrape)
             geonode_future = executor.submit(self.fetch_geonode)

StreamingCommunity/Upload/update.py

@@ -3,7 +3,7 @@
 import os
 import sys
 import time
+import asyncio

 # External library
 import httpx
@@ -24,32 +24,33 @@ else:
     base_path = os.path.dirname(__file__)

 console = Console()

+async def fetch_github_data(client, url):
+    """Helper function to fetch data from GitHub API"""
+    response = await client.get(
+        url=url,
+        headers={'user-agent': get_userAgent()},
+        timeout=config_manager.get_int("REQUESTS", "timeout"),
+        follow_redirects=True
+    )
+    return response.json()
+
+async def async_github_requests():
+    """Make concurrent GitHub API requests"""
+    async with httpx.AsyncClient() as client:
+        tasks = [
+            fetch_github_data(client, f"https://api.github.com/repos/{__author__}/{__title__}"),
+            fetch_github_data(client, f"https://api.github.com/repos/{__author__}/{__title__}/releases"),
+            fetch_github_data(client, f"https://api.github.com/repos/{__author__}/{__title__}/commits")
+        ]
+        return await asyncio.gather(*tasks)
+
 def update():
     """
     Check for updates on GitHub and display relevant information.
     """
     try:
-        response_reposity = httpx.get(
-            url=f"https://api.github.com/repos/{__author__}/{__title__}",
-            headers={'user-agent': get_userAgent()},
-            timeout=config_manager.get_int("REQUESTS", "timeout"),
-            follow_redirects=True
-        ).json()
-
-        response_releases = httpx.get(
-            url=f"https://api.github.com/repos/{__author__}/{__title__}/releases",
-            headers={'user-agent': get_userAgent()},
-            timeout=config_manager.get_int("REQUESTS", "timeout"),
-            follow_redirects=True
-        ).json()
-
-        response_commits = httpx.get(
-            url=f"https://api.github.com/repos/{__author__}/{__title__}/commits",
-            headers={'user-agent': get_userAgent()},
-            timeout=config_manager.get_int("REQUESTS", "timeout"),
-            follow_redirects=True
-        ).json()
+        # Run async requests concurrently
+        response_reposity, response_releases, response_commits = asyncio.run(async_github_requests())

     except Exception as e:
         console.print(f"[red]Error accessing GitHub API: {e}")
@@ -92,4 +93,4 @@ def update():
     console.print(f"\n[red]{__title__} has been downloaded [yellow]{total_download_count} [red]times, but only [yellow]{percentual_stars}% [red]of users have starred it.\n\
         [cyan]Help the repository grow today by leaving a [yellow]star [cyan]and [yellow]sharing [cyan]it with others online!")

-    time.sleep(3)
+    time.sleep(4)

StreamingCommunity/Util/os.py

@@ -19,6 +19,7 @@ from unidecode import unidecode
 from rich.console import Console
 from rich.prompt import Prompt
 from pathvalidate import sanitize_filename, sanitize_filepath
+from dns.resolver import dns

 # Internal utilities
@@ -282,6 +283,43 @@ class InternManager():
         else:
             return f"{bytes / (1024 * 1024):.2f} MB/s"

+    def check_dns_provider(self):
+        """
+        Check if the system's current DNS server matches any known DNS providers.
+
+        Returns:
+            bool: True if the current DNS server matches a known provider,
+                  False if no match is found or in case of errors
+        """
+        dns_providers = {
+            "Cloudflare": ["1.1.1.1", "1.0.0.1"],
+            "Google": ["8.8.8.8", "8.8.4.4"],
+            "OpenDNS": ["208.67.222.222", "208.67.220.220"],
+            "Quad9": ["9.9.9.9", "149.112.112.112"],
+            "AdGuard": ["94.140.14.14", "94.140.15.15"],
+            "Comodo": ["8.26.56.26", "8.20.247.20"],
+            "Level3": ["209.244.0.3", "209.244.0.4"],
+            "Norton": ["199.85.126.10", "199.85.127.10"],
+            "CleanBrowsing": ["185.228.168.9", "185.228.169.9"],
+            "Yandex": ["77.88.8.8", "77.88.8.1"]
+        }
+
+        try:
+            resolver = dns.resolver.Resolver()
+            nameservers = resolver.nameservers
+
+            if not nameservers:
+                return False
+
+            for server in nameservers:
+                for provider, ips in dns_providers.items():
+                    if server in ips:
+                        return True
+            return False
+
+        except Exception:
+            return False
+
 class OsSummary:
     def __init__(self):

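To see what the new check_dns_provider() will compare against the provider table, the system resolvers can be printed with dnspython directly; a tiny sketch, not part of the commit:

# Illustrative only: print the nameservers that check_dns_provider() inspects
# (requires the dnspython package added to requirements.txt in this commit).
import dns.resolver

resolver = dns.resolver.Resolver()
print(resolver.nameservers)   # e.g. ['1.1.1.1'] would match the Cloudflare entry
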
global_search.py

@@ -58,7 +58,10 @@ def load_search_functions():
             # Get 'indice' from the module
             indice = getattr(mod, 'indice', 0)
             use_for = getattr(mod, '_useFor', 'other')
-            modules.append((module_name, indice, use_for))
+            priority = getattr(mod, '_priority', 0)
+
+            if priority == 0:
+                modules.append((module_name, indice, use_for))

         except Exception as e:
             console.print(f"[red]Failed to import module {module_name}: {str(e)}")
@@ -296,17 +299,26 @@ def process_selected_item(selected_item, search_functions):
     console.print(f"\n[bold green]Processing selection from:[/bold green] {selected_item.get('source')}")

     # Extract necessary information to pass to the site's search function
-    item_id = selected_item.get('id', selected_item.get('media_id'))
+    item_id = None
+    for id_field in ['id', 'media_id', 'ID', 'item_id', 'url']:
+        item_id = selected_item.get(id_field)
+        if item_id:
+            break
+
     item_type = selected_item.get('type', selected_item.get('media_type', 'unknown'))
     item_title = selected_item.get('title', selected_item.get('name', 'Unknown'))

     if item_id:
         console.print(f"[bold green]Selected item:[/bold green] {item_title} (ID: {item_id}, Type: {item_type})")

+        # Call the site's search function with direct_item parameter to process download
         try:
             func(direct_item=selected_item)
         except Exception as e:
             console.print(f"[bold red]Error processing download:[/bold red] {str(e)}")
+            logging.exception("Download processing error")
     else:
-        console.print("[bold red]Error: Item ID not found.[/bold red]")
+        console.print("[bold red]Error: Item ID not found. Available fields:[/bold red]")
+        for key in selected_item.keys():
+            console.print(f"[yellow]- {key}: {selected_item[key]}[/yellow]")

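For context, load_search_functions() above reads the module-level attributes indice, _useFor and the new _priority via getattr(), and only modules left at priority 0 are registered. A hypothetical site module skeleton that would pass that filter (the attribute names come from the hunk above; the search() signature with direct_item mirrors the func(direct_item=...) call but is otherwise an assumption, not taken from this diff):

# Hypothetical site module as seen by load_search_functions()
indice = 10          # menu ordering
_useFor = "film"     # category grouping
_priority = 0        # 0 -> included in the global search menu

def search(string_to_search=None, get_onlyDatabase=False, direct_item=None):
    # Site-specific scraping would go here; return the number of results found.
    return 0
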
run.py

@@ -21,7 +21,7 @@ from rich.prompt import Prompt
 from .global_search import global_search
 from StreamingCommunity.Util.message import start_message
 from StreamingCommunity.Util.config_json import config_manager
-from StreamingCommunity.Util.os import os_summary
+from StreamingCommunity.Util.os import os_summary, internet_manager
 from StreamingCommunity.Util.logger import Logger
 from StreamingCommunity.Upload.update import update as git_update
 from StreamingCommunity.Lib.TMBD import tmdb
@@ -200,6 +200,15 @@ def main(script_id = 0):
     # Create logger
     log_not = Logger()
     initialize()

+    if not internet_manager.check_dns_provider():
+        console.print("[red]❌ ERROR: DNS configuration is required!")
+        console.print("[red]The program cannot function correctly without proper DNS settings.")
+        console.print("[yellow]Please configure one of these DNS servers:")
+        console.print("[blue]• Cloudflare (1.1.1.1)")
+        console.print("[blue]• Quad9 (9.9.9.9)")
+        console.print("\n[yellow]⚠️ The program will not work until you configure your DNS settings.")
+        input("[yellow]Press Enter to exit...")
+
     # Load search functions
     search_functions = load_search_functions()
@@ -381,4 +390,4 @@ def main(script_id = 0):
     # Delete script_id
     script_id = TelegramSession.get_session()
     if script_id != "unknown":
-        TelegramSession.deleteScriptId(script_id)
+        TelegramSession.deleteScriptId(script_id)

requirements.txt

@@ -6,9 +6,10 @@ m3u8
 certifi
 psutil
 unidecode
+dnspython
 jsbeautifier
 pathvalidate
 pycryptodomex
 ua-generator
 qbittorrent-api
 pyTelegramBotAPI