Mirror of https://github.com/Arrowar/StreamingCommunity.git, synced 2025-06-06 19:45:24 +00:00

Commit 943706c755: Merge branch 'Arrowar:main' into main
.github/.domain/domain_update.py (vendored, new file, 263 lines)
@@ -0,0 +1,263 @@
# 20.04.2024

import re
import os
import json
from datetime import datetime
from urllib.parse import urlparse, urlunparse

import httpx
import ua_generator

JSON_FILE_PATH = os.path.join(".github", ".domain", "domains.json")


def load_domains(file_path):
    if not os.path.exists(file_path):
        print(f"Error: The file {file_path} was not found.")
        return None

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)

    except Exception as e:
        print(f"Error reading the file {file_path}: {e}")
        return None


def save_domains(file_path, data):
    try:
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2, ensure_ascii=False)
        print(f"Data successfully saved to {file_path}")

    except Exception as e:
        print(f"Error saving the file {file_path}: {e}")


def get_new_tld(full_url):
    try:
        parsed_url = urlparse(full_url)
        hostname = parsed_url.hostname
        if hostname:
            parts = hostname.split('.')
            return parts[-1]

    except Exception:
        pass

    return None


def extract_domain_from_response(response, original_url):
    if 'location' in response.headers:
        return response.headers['location']

    if str(response.url) != original_url:
        return str(response.url)

    try:
        content_type = response.headers.get('content-type', '').lower()
        if 'text/html' in content_type or 'text/plain' in content_type:
            response_text = response.text

            js_redirect_patterns = [
                r'window\.location\.href\s*=\s*["\']([^"\']+)["\']',
                r'window\.location\s*=\s*["\']([^"\']+)["\']',
                r'location\.href\s*=\s*["\']([^"\']+)["\']',
                r'document\.location\s*=\s*["\']([^"\']+)["\']'
            ]

            for pattern in js_redirect_patterns:
                js_match = re.search(pattern, response_text, re.IGNORECASE)
                if js_match:
                    return js_match.group(1)

            meta_patterns = [
                r'<meta[^>]*http-equiv=["\']?refresh["\']?[^>]*content=["\'][^"\']*url=([^"\'>\s]+)',
                r'<meta[^>]*content=["\'][^"\']*url=([^"\'>\s]+)[^>]*http-equiv=["\']?refresh["\']?'
            ]

            for pattern in meta_patterns:
                meta_match = re.search(pattern, response_text, re.IGNORECASE)
                if meta_match:
                    return meta_match.group(1)

            canonical_match = re.search(r'<link[^>]*rel=["\']?canonical["\']?[^>]*href=["\']([^"\']+)["\']', response_text, re.IGNORECASE)
            if canonical_match:
                return canonical_match.group(1)

            base_match = re.search(r'<base[^>]*href=["\']([^"\']+)["\']', response_text, re.IGNORECASE)
            if base_match:
                return base_match.group(1)

            error_redirect_patterns = [
                r'[Rr]edirect(?:ed)?\s+to:?\s*([^\s<>"\']+)',
                r'[Nn]ew\s+[Uu][Rr][Ll]:?\s*([^\s<>"\']+)',
                r'[Mm]oved\s+to:?\s*([^\s<>"\']+)',
                r'[Ff]ound\s+at:?\s*([^\s<>"\']+)'
            ]

            for pattern in error_redirect_patterns:
                error_match = re.search(pattern, response_text)
                if error_match:
                    potential_url = error_match.group(1)
                    if potential_url.startswith(('http://', 'https://', '//')):
                        return potential_url

    except Exception as e:
        print(f"  [!] Error extracting from response content: {e}")

    return None


def try_url(url_to_try, headers, timeout=15):
    try:
        with httpx.Client(headers=headers, timeout=timeout, follow_redirects=False) as client:
            response = client.get(url_to_try)

            if response.status_code in [301, 302, 303, 307, 308]:
                location = response.headers.get('location')
                if location:
                    print(f"  [+] Found redirect ({response.status_code}) to: {location}")
                    try:
                        final_response = client.get(location)
                        if 200 <= final_response.status_code < 400:
                            return final_response
                        else:
                            return httpx.Response(
                                status_code=200,
                                headers={"location": location},
                                content=b"",
                                request=response.request
                            )
                    except Exception:
                        return httpx.Response(
                            status_code=200,
                            headers={"location": location},
                            content=b"",
                            request=response.request
                        )

            elif response.status_code in [403, 409, 429, 503]:
                print(f"  [!] HTTP {response.status_code} - attempting to extract redirect info")

                location = response.headers.get('location')
                if location:
                    print(f"  [+] Found location header in error response: {location}")
                    return httpx.Response(
                        status_code=200,
                        headers={"location": location},
                        content=b"",
                        request=response.request
                    )

                new_url = extract_domain_from_response(response, url_to_try)
                if new_url and new_url != url_to_try:
                    print(f"  [+] Found redirect URL in error response content: {new_url}")
                    return httpx.Response(
                        status_code=200,
                        headers={"location": new_url},
                        content=b"",
                        request=response.request
                    )

            if 200 <= response.status_code < 400:
                return response

            print(f"  [!] HTTP {response.status_code} for {url_to_try}")

    except httpx.HTTPStatusError as http_err:
        new_url = extract_domain_from_response(http_err.response, url_to_try)
        if new_url:
            print(f"  [+] Found new URL from HTTPStatusError response: {new_url}")
            return httpx.Response(
                status_code=200,
                headers={"location": new_url},
                content=b"",
                request=http_err.request
            )
    except Exception as e:
        print(f"  [!] Error for {url_to_try}: {type(e).__name__}")

    return None


def update_domain_entries(data):
    if not data:
        return False

    updated_count = 0

    for key, entry in data.items():
        print(f"\n--- [DOMAIN] {key} ---")
        original_full_url = entry.get("full_url")
        original_domain_in_entry = entry.get("domain")

        if not original_full_url:
            print(f"  [!] 'full_url' missing. Skipped.")
            continue

        ua = ua_generator.generate(device=('desktop', 'mobile'), browser=('chrome', 'edge', 'firefox', 'safari'))
        current_headers = ua.headers.get()

        print(f"  [] Stored URL: {original_full_url}")
        if original_domain_in_entry:
            print(f"  [] Stored Domain (TLD): {original_domain_in_entry}")

        print(f"  [] Testing URL: {original_full_url}")
        response = try_url(original_full_url, current_headers)

        if response:
            final_url_from_request = str(response.url)
            print(f"  [+] Redirect/Response to: {final_url_from_request}")

            parsed_final_url = urlparse(final_url_from_request)
            normalized_full_url = urlunparse(parsed_final_url._replace(path='/', params='', query='', fragment=''))
            if parsed_final_url.path == '' and not normalized_full_url.endswith('/'):
                normalized_full_url += '/'

            if normalized_full_url != final_url_from_request:
                print(f"  [+] Normalized URL: {normalized_full_url}")

            if normalized_full_url != original_full_url:
                new_tld_val = get_new_tld(final_url_from_request)

                if new_tld_val:
                    entry["full_url"] = normalized_full_url

                    if new_tld_val != original_domain_in_entry:
                        print(f"  [-] Domain TLD Changed: '{original_domain_in_entry}' -> '{new_tld_val}'")
                        entry["old_domain"] = original_domain_in_entry if original_domain_in_entry else entry.get("old_domain", "")
                        entry["domain"] = new_tld_val
                        entry["time_change"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                        print(f"  [-] Domain & URL Updated: New TLD '{new_tld_val}', New URL '{normalized_full_url}'")
                    else:
                        entry["domain"] = new_tld_val
                        print(f"  [-] URL Updated (TLD Unchanged '{new_tld_val}'): New URL '{normalized_full_url}'")

                    updated_count += 1

                else:
                    print(f"  [!] Could not extract TLD from {final_url_from_request}. URL not updated despite potential change.")
            else:
                print(f"  [] Same Domain: {final_url_from_request}")

        else:
            print(f"  [-] No response for {key}")

    return updated_count > 0


def main():
    print("Starting domain update script...")
    domain_data = load_domains(JSON_FILE_PATH)

    if domain_data:
        if update_domain_entries(domain_data):
            save_domains(JSON_FILE_PATH, domain_data)
            print("\nUpdate complete. Some entries were modified.")
        else:
            print("\nUpdate complete. No domains were modified.")
    else:
        print("\nCannot proceed without domain data.")

    print("Script finished.")


if __name__ == "__main__":
    main()
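Worth pulling out: the normalization step in update_domain_entries collapses whatever URL the redirect chase lands on back to a bare scheme-plus-host form before comparing it with the stored one. A minimal sketch of just that step, using only the standard library (the example URL is hypothetical, not a real entry):

    # Sketch of the normalization used above; the input URL is made up.
    from urllib.parse import urlparse, urlunparse

    final_url = "https://streamingunity.example/titles/123?ref=home#top"
    parsed = urlparse(final_url)

    # Drop path, params, query and fragment; keep only scheme and host.
    normalized = urlunparse(parsed._replace(path='/', params='', query='', fragment=''))

    print(normalized)  # -> https://streamingunity.example/
    # get_new_tld() would then record parsed.hostname.split('.')[-1],
    # i.e. "example" here, as the entry's new TLD.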
.github/.domain/domains.json (vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
{
  "1337xx": {
    "domain": "to",
    "full_url": "https://www.1337xx.to/",
    "old_domain": "to",
    "time_change": "2025-03-19 12:20:19"
  },
  "cb01new": {
    "domain": "life",
    "full_url": "https://cb01net.life/",
    "old_domain": "download",
    "time_change": "2025-06-01 01:02:16"
  },
  "animeunity": {
    "domain": "so",
    "full_url": "https://www.animeunity.so/",
    "old_domain": "so",
    "time_change": "2025-03-19 12:20:23"
  },
  "animeworld": {
    "domain": "ac",
    "full_url": "https://www.animeworld.ac/",
    "old_domain": "ac",
    "time_change": "2025-03-21 12:20:27"
  },
  "guardaserie": {
    "domain": "meme",
    "full_url": "https://guardaserie.meme/",
    "old_domain": "meme",
    "time_change": "2025-03-19 12:20:24"
  },
  "ddlstreamitaly": {
    "domain": "co",
    "full_url": "https://ddlstreamitaly.co/",
    "old_domain": "co",
    "time_change": "2025-03-19 12:20:26"
  },
  "streamingwatch": {
    "domain": "org",
    "full_url": "https://www.streamingwatch.org/",
    "old_domain": "org",
    "time_change": "2025-04-29 12:30:30"
  },
  "altadefinizione": {
    "domain": "spa",
    "full_url": "https://altadefinizione.spa/",
    "old_domain": "locker",
    "time_change": "2025-05-26 23:22:45"
  },
  "streamingcommunity": {
    "domain": "bio",
    "full_url": "https://streamingunity.bio/",
    "old_domain": "blog",
    "time_change": "2025-05-31 12:17:33"
  },
  "altadefinizionegratis": {
    "domain": "icu",
    "full_url": "https://altadefinizionegratis.icu/",
    "old_domain": "taipei",
    "time_change": "2025-05-18 11:21:05"
  }
}
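Each entry maps a service name to its current TLD and full URL, keeping the previous TLD and the change timestamp for reference. A consumer normally needs only full_url; a minimal sketch, assuming the same repo-relative path the updater uses:

    # Minimal consumer sketch for domains.json (path assumed repo-relative).
    import json

    with open(".github/.domain/domains.json", encoding="utf-8") as f:
        domains = json.load(f)

    entry = domains["streamingcommunity"]
    print(entry["full_url"])    # current URL, e.g. https://streamingunity.bio/
    print(entry["domain"])      # current TLD, e.g. bio
    print(entry["old_domain"])  # previous TLD, kept for reference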
.github/.site/js/script.js (vendored, 32 lines changed)
@@ -113,43 +113,27 @@ async function checkSiteStatus(url, siteName) {
     }
 }
 
-const supabaseUrl = 'https://zvfngpoxwrgswnzytadh.supabase.co';
-const supabaseKey = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Inp2Zm5ncG94d3Jnc3duenl0YWRoIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDAxNTIxNjMsImV4cCI6MjA1NTcyODE2M30.FNTCCMwi0QaKjOu8gtZsT5yQttUW8QiDDGXmzkn89QE';
+const domainsJsonUrl = 'https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/.github/.domain/domains.json';
 
 async function loadSiteData() {
     try {
-        console.log('Starting to load site data...');
+        console.log('Starting to load site data from GitHub...');
 
         createStatusIndicator();
-        updateStatusIndicator('Loading...', 'Fetching site data from database...', 0);
+        updateStatusIndicator('Loading...', 'Fetching site data from GitHub repository...', 0);
 
         const siteList = document.getElementById('site-list');
 
-        const headers = {
-            'accept': '*/*',
-            'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
-            'apikey': supabaseKey,
-            'authorization': `Bearer ${supabaseKey}`,
-            'content-type': 'application/json',
-            'cache-control': 'no-cache',
-            'pragma': 'no-cache',
-            'range': '0-9'
-        };
-
-        console.log('Fetching from Supabase with headers:', headers);
-        const response = await fetch(`${supabaseUrl}/rest/v1/public?select=*`, {
-            method: 'GET',
-            headers: headers
-        });
+        console.log(`Fetching from GitHub: ${domainsJsonUrl}`);
+        const response = await fetch(domainsJsonUrl);
 
         if (!response.ok) throw new Error(`HTTP error! Status: ${response.status}`);
 
-        const data = await response.json();
+        const configSite = await response.json(); // Directly get the site data object
 
         siteList.innerHTML = '';
 
-        if (data && data.length > 0) {
-            const configSite = data[0].data;
+        if (configSite && Object.keys(configSite).length > 0) { // Check if configSite is a non-empty object
             totalSites = Object.keys(configSite).length;
             completedSites = 0;
             let latestUpdate = new Date(0);
@@ -239,7 +223,7 @@ async function loadSiteData() {
             document.getElementById('last-update-time').textContent = formattedDate;
         } else {
             siteList.innerHTML = '<div class="no-sites">No sites available</div>';
-            updateStatusIndicator('Ready', 'No sites found in database', 100);
+            updateStatusIndicator('Ready', 'No sites found in the JSON file.', 100);
         }
     } catch (error) {
         console.error('Errore:', error);
.github/workflows/update_domain.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: Update domains

on:
  schedule:
    - cron: "0 */2 * * *"
  workflow_dispatch:

jobs:
  update-domains:
    runs-on: ubuntu-latest
    permissions:
      contents: write

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: |
          pip install httpx ua-generator requests
          pip install --upgrade pip setuptools wheel

      - name: Configure DNS
        run: |
          sudo sh -c 'echo "nameserver 9.9.9.9" > /etc/resolv.conf'
          cat /etc/resolv.conf

      - name: Execute domain update script
        run: python .github/.domain/domain_update.py

      - name: Commit and push changes (if any)
        run: |
          git config --global user.name 'github-actions[bot]'
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'

          # Check if domains.json was modified
          if ! git diff --quiet .github/.domain/domains.json; then
            git add .github/.domain/domains.json
            git commit -m "Automatic domain update [skip ci]"
            echo "Changes committed. Attempting to push..."
            git push
          else
            echo "No changes to .github/.domain/domains.json to commit."
          fi
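The commit step only fires when git diff sees a change to domains.json, so scheduled runs that find nothing push nothing. A hypothetical local sanity check before trusting an automated commit might look like the sketch below; it is not part of the workflow, and the required-field list is an assumption mirroring what domain_update.py writes:

    # Hypothetical pre-commit sanity check for domains.json (illustrative only).
    import json

    REQUIRED_FIELDS = {"domain", "full_url"}  # assumed minimal schema

    with open(".github/.domain/domains.json", encoding="utf-8") as f:
        domains = json.load(f)

    for name, entry in domains.items():
        missing = REQUIRED_FIELDS - entry.keys()
        if missing:
            raise SystemExit(f"{name}: missing fields {missing}")
        if not entry["full_url"].startswith("https://"):
            raise SystemExit(f"{name}: suspicious full_url {entry['full_url']!r}")

    print(f"OK: {len(domains)} entries validated")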
.gitignore (vendored, 1 line changed)
@@ -52,5 +52,4 @@ cmd.txt
 bot_config.json
 scripts.json
 active_requests.json
-domains.json
 working_proxies.json
@@ -268,33 +268,32 @@ class ConfigManager:
             self._load_site_data_from_file()
 
     def _load_site_data_from_api(self) -> None:
-        """Load site data from API."""
+        """Load site data from GitHub."""
+        domains_github_url = "https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/.github/.domain/domains.json"
         headers = {
-            "apikey": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Inp2Zm5ncG94d3Jnc3duenl0YWRoIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDAxNTIxNjMsImV4cCI6MjA1NTcyODE2M30.FNTCCMwi0QaKjOu8gtZsT5yQttUW8QiDDGXmzkn89QE",
-            "Authorization": f"Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Inp2Zm5ncG94d3Jnc3duenl0YWRoIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NDAxNTIxNjMsImV4cCI6MjA1NTcyODE2M30.FNTCCMwi0QaKjOu8gtZsT5yQttUW8QiDDGXmzkn89QE",
-            "Content-Type": "application/json",
             "User-Agent": get_userAgent()
         }
 
         try:
-            console.print("[bold cyan]Retrieving site data from API...[/bold cyan]")
-            response = requests.get("https://zvfngpoxwrgswnzytadh.supabase.co/rest/v1/public", timeout=8, headers=headers)
+            console.print(f"[bold cyan]Retrieving site data from GitHub:[/bold cyan] [green]{domains_github_url}[/green]")
+            response = requests.get(domains_github_url, timeout=8, headers=headers)
 
             if response.ok:
-                data = response.json()
-                if data and len(data) > 0:
-                    self.configSite = data[0]['data']
-
-                    site_count = len(self.configSite) if isinstance(self.configSite, dict) else 0
-
-                else:
-                    console.print("[bold yellow]API returned an empty data set[/bold yellow]")
+                self.configSite = response.json()
+
+                site_count = len(self.configSite) if isinstance(self.configSite, dict) else 0
+                console.print(f"[bold green]Site data loaded from GitHub:[/bold green] {site_count} streaming services found.")
+
             else:
-                console.print(f"[bold red]API request failed:[/bold red] HTTP {response.status_code}, {response.text[:100]}")
+                console.print(f"[bold red]GitHub request failed:[/bold red] HTTP {response.status_code}, {response.text[:100]}")
                 self._handle_site_data_fallback()
 
+        except json.JSONDecodeError as e:
+            console.print(f"[bold red]Error parsing JSON from GitHub:[/bold red] {str(e)}")
+            self._handle_site_data_fallback()
+
         except Exception as e:
-            console.print(f"[bold red]API connection error:[/bold red] {str(e)}")
+            console.print(f"[bold red]GitHub connection error:[/bold red] {str(e)}")
             self._handle_site_data_fallback()
 
     def _load_site_data_from_file(self) -> None:
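With the Supabase keys gone, any consumer can reproduce this load with a plain GET against the raw file. A standalone sketch of the same fetch-then-fallback shape, outside the class; the local fallback path is an assumption, and inside the app that role is played by _handle_site_data_fallback():

    # Standalone sketch of the GitHub-first, local-fallback load (illustrative).
    import json
    import requests

    DOMAINS_URL = "https://raw.githubusercontent.com/Arrowar/StreamingCommunity/refs/heads/main/.github/.domain/domains.json"

    def load_site_data():
        try:
            response = requests.get(DOMAINS_URL, timeout=8)
            if response.ok:
                return response.json()
        except (requests.RequestException, ValueError):
            pass  # fall through to the local copy

        # Fallback: read the vendored copy shipped with the repo (assumed path).
        with open(".github/.domain/domains.json", encoding="utf-8") as f:
            return json.load(f)

    config_site = load_site_data()
    print(f"{len(config_site)} streaming services found")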
@@ -12,7 +12,7 @@ import inspect
 import subprocess
 import contextlib
 import importlib.metadata
-
+import socket
 
 # External library
 from unidecode import unidecode
@@ -283,38 +283,62 @@ class InternManager():
         else:
             return f"{bytes / (1024 * 1024):.2f} MB/s"
 
-    def check_dns_provider(self):
-        """
-        Check if the system's current DNS server matches any known DNS providers.
-
-        Returns:
-            bool: True if the current DNS server matches a known provider,
-                  False if no match is found or in case of errors
-        """
-        dns_providers = {
-            "Cloudflare": ["1.1.1.1", "1.0.0.1"],
-            "Google": ["8.8.8.8", "8.8.4.4"],
-            "OpenDNS": ["208.67.222.222", "208.67.220.220"],
-            "Quad9": ["9.9.9.9", "149.112.112.112"],
-        }
-
-        try:
-            resolver = dns.resolver.Resolver()
-            nameservers = resolver.nameservers
-
-            if not nameservers:
-                return False
-
-            for server in nameservers:
-                for provider, ips in dns_providers.items():
-                    if server in ips:
-                        return True
-            return False
-
-        except Exception:
-            return False
+    # def check_dns_provider(self):
+    #     """
+    #     Check if the system's current DNS server matches any known DNS providers.
+
+    #     Returns:
+    #         bool: True if the current DNS server matches a known provider,
+    #               False if no match is found or in case of errors
+    #     """
+    #     dns_providers = {
+    #         "Cloudflare": ["1.1.1.1", "1.0.0.1"],
+    #         "Google": ["8.8.8.8", "8.8.4.4"],
+    #         "OpenDNS": ["208.67.222.222", "208.67.220.220"],
+    #         "Quad9": ["9.9.9.9", "149.112.112.112"],
+    #         "AdGuard": ["94.140.14.14", "94.140.15.15"],
+    #         "Comodo": ["8.26.56.26", "8.20.247.20"],
+    #         "Level3": ["209.244.0.3", "209.244.0.4"],
+    #         "Norton": ["199.85.126.10", "199.85.127.10"],
+    #         "CleanBrowsing": ["185.228.168.9", "185.228.169.9"],
+    #         "Yandex": ["77.88.8.8", "77.88.8.1"]
+    #     }
+
+    #     try:
+    #         resolver = dns.resolver.Resolver()
+    #         nameservers = resolver.nameservers
+
+    #         if not nameservers:
+    #             return False
+
+    #         for server in nameservers:
+    #             for provider, ips in dns_providers.items():
+    #                 if server in ips:
+    #                     return True
+    #         return False
+
+    #     except Exception:
+    #         return False
+
+    def check_dns_resolve(self):
+        """
+        Check if the system's current DNS server can resolve a domain name.
+        Works on both Windows and Unix-like systems.
+
+        Returns:
+            bool: True if the current DNS server can resolve a domain name,
+                  False if can't resolve or in case of errors
+        """
+        test_domains = ["github.com", "google.com", "microsoft.com", "amazon.com"]
+
+        try:
+            for domain in test_domains:
+                # socket.gethostbyname() works consistently across all platforms
+                socket.gethostbyname(domain)
+            return True
+
+        except (socket.gaierror, socket.error):
+            return False
 
 
 class OsSummary:
     def __init__(self):
         self.ffmpeg_path = None
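The swap trades provider detection for a behavioral test: instead of asking which resolver is configured (which needed dns.resolver from dnspython), check_dns_resolve simply attempts real lookups with socket.gethostbyname, which behaves the same on Windows and Unix. A standalone sketch of the same all-must-resolve logic, with an illustrative hostname list:

    # Standalone sketch mirroring check_dns_resolve: every host must resolve.
    import socket

    def dns_works(test_domains=("github.com", "google.com")):
        try:
            for domain in test_domains:
                socket.gethostbyname(domain)  # raises socket.gaierror on failure
            return True
        except socket.gaierror:
            return False

    print("DNS OK" if dns_works() else "DNS broken")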
@@ -210,7 +210,19 @@ def main(script_id = 0):
     log_not = Logger()
     initialize()
 
-    if not internet_manager.check_dns_provider():
+    # if not internet_manager.check_dns_provider():
+    #     print()
+    #     console.print("[red]❌ ERROR: DNS configuration is required!")
+    #     console.print("[red]The program cannot function correctly without proper DNS settings.")
+    #     console.print("[yellow]Please configure one of these DNS servers:")
+    #     console.print("[blue]• Cloudflare (1.1.1.1) 'https://developers.cloudflare.com/1.1.1.1/setup/windows/'")
+    #     console.print("[blue]• Quad9 (9.9.9.9) 'https://docs.quad9.net/Setup_Guides/Windows/Windows_10/'")
+    #     console.print("\n[yellow]⚠️ The program will not work until you configure your DNS settings.")
+
+    #     time.sleep(2)
+    #     msg.ask("[yellow]Press Enter to continue ...")
+
+    if not internet_manager.check_dns_resolve():
         print()
         console.print("[red]❌ ERROR: DNS configuration is required!")
         console.print("[red]The program cannot function correctly without proper DNS settings.")
@@ -219,8 +231,7 @@ def main(script_id = 0):
         console.print("[blue]• Quad9 (9.9.9.9) 'https://docs.quad9.net/Setup_Guides/Windows/Windows_10/'")
         console.print("\n[yellow]⚠️ The program will not work until you configure your DNS settings.")
 
-        time.sleep(2)
-        msg.ask("[yellow]Press Enter to continue ...")
+        os._exit(0)
 
     # Load search functions
     search_functions = load_search_functions()