Mirror of https://github.com/Arrowar/StreamingCommunity.git (synced 2025-06-05 02:55:25 +00:00)
parent e4f0080a4b
commit 34f6626a58

7   .github/workflows/build.yml   (vendored)

@@ -13,11 +13,11 @@ on:
         - 'false'
   push:
     tags:
-      - "*"
+      - "v*.*"
 
 jobs:
   publish:
-    if: github.event.inputs.publish_pypi == 'true'
+    if: startsWith(github.ref_name, 'v') && github.event.inputs.publish_pypi == 'true'
     runs-on: ubuntu-latest
 
     steps:

@@ -39,6 +39,7 @@ jobs:
       run: |
         python -m pip install --upgrade pip
         python -m pip install setuptools wheel twine
 
     - name: Build package
       run: python setup.py sdist bdist_wheel
 

@@ -49,7 +50,7 @@ jobs:
       run: twine upload dist/*
 
   build:
-    if: github.event.inputs.publish_pypi == 'false'
+    if: startsWith(github.ref_name, 'v') && github.event.inputs.publish_pypi == 'false'
     strategy:
       matrix:
         os: [windows-latest, ubuntu-latest, macos-latest]
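
Both jobs are now gated on the ref being a version tag in addition to the publish_pypi input. As a rough illustration only (not part of the workflow itself), the combined conditions behave like the following Python sketch, where ref_name stands in for github.ref_name and publish_pypi for the workflow input:

# Hypothetical illustration of the new job gating; not code from the repository.
def should_run_publish(ref_name: str, publish_pypi: str) -> bool:
    # publish job: only on refs like "v1.2" and only when a PyPI publish was requested
    return ref_name.startswith("v") and publish_pypi == "true"

def should_run_build(ref_name: str, publish_pypi: str) -> bool:
    # build job: only on version tags when a PyPI publish was NOT requested
    return ref_name.startswith("v") and publish_pypi == "false"

assert should_run_publish("v3.0.9", "true")
assert not should_run_publish("main", "true")    # non-tag refs are skipped
assert should_run_build("v3.0.9", "false")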

@@ -51,10 +51,12 @@ def title_search(word_to_search: str) -> int:
         console.print("[yellow]The service might be temporarily unavailable or the domain may have changed.[/yellow]")
         sys.exit(1)
 
     # Construct the full site URL and load the search page
+    search_url = f"{site_constant.FULL_URL}/search/{word_to_search}/1/"
+    console.print(f"[cyan]Search url: [yellow]{search_url}")
 
     try:
         response = httpx.get(
-            url=f"{site_constant.FULL_URL}/search/{word_to_search}/1/",
+            url=search_url,
             headers={'user-agent': get_userAgent()},
             follow_redirects=True,
             timeout=max_timeout
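
The refactor builds the search URL once, logs it, and reuses the same variable in the request, so the printed value and the requested value cannot drift apart. A minimal standalone sketch of that flow, with FULL_URL, USER_AGENT and MAX_TIMEOUT as placeholder assumptions standing in for the project's site_constant.FULL_URL, get_userAgent() and max_timeout:

# Minimal sketch of the refactored request flow; all constants are placeholders.
import httpx

FULL_URL = "https://example.com"   # assumption: stand-in for site_constant.FULL_URL
USER_AGENT = "Mozilla/5.0"         # assumption: stand-in for get_userAgent()
MAX_TIMEOUT = 10                   # assumption: stand-in for max_timeout

def search_page(word_to_search: str) -> httpx.Response:
    # Build the URL once, log it, then pass the same variable to the client.
    search_url = f"{FULL_URL}/search/{word_to_search}/1/"
    print(f"Search url: {search_url}")

    return httpx.get(
        url=search_url,
        headers={'user-agent': USER_AGENT},
        follow_redirects=True,
        timeout=MAX_TIMEOUT,
    )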

@@ -51,12 +51,19 @@ def title_search(word_to_search: str) -> int:
         console.print("[yellow]The service might be temporarily unavailable or the domain may have changed.[/yellow]")
         sys.exit(1)
 
-    response = httpx.get(
-        url=f"{site_constant.FULL_URL}/?s={word_to_search}",
-        headers={'user-agent': get_userAgent()},
-        timeout=max_timeout
-    )
-    response.raise_for_status()
+    search_url = f"{site_constant.FULL_URL}/?s={word_to_search}"
+    console.print(f"[cyan]Search url: [yellow]{search_url}")
+
+    try:
+        response = httpx.get(
+            url=search_url,
+            headers={'user-agent': get_userAgent()},
+            timeout=max_timeout
+        )
+        response.raise_for_status()
+
+    except Exception as e:
+        console.print(f"Site: {site_constant.SITE_NAME}, request search error: {e}")
 
     # Create soup and find table
     soup = BeautifulSoup(response.text, "html.parser")
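
This hunk also wraps the request in try/except and checks the status before parsing. A hedged sketch of that error-handling shape with placeholder values for site_constant.FULL_URL and site_constant.SITE_NAME; unlike the hunk above, the sketch exits on failure so response is always bound before the HTML is parsed:

# Sketch of the try / raise_for_status / except shape; values are placeholders.
import sys
import httpx
from bs4 import BeautifulSoup

FULL_URL = "https://example.com"   # assumption: stand-in for site_constant.FULL_URL
SITE_NAME = "example"              # assumption: stand-in for site_constant.SITE_NAME

def fetch_search_results(word_to_search: str) -> BeautifulSoup:
    search_url = f"{FULL_URL}/?s={word_to_search}"
    print(f"Search url: {search_url}")

    try:
        response = httpx.get(search_url, headers={'user-agent': 'Mozilla/5.0'}, timeout=10)
        response.raise_for_status()   # turn 4xx/5xx answers into exceptions

    except Exception as e:
        # Log and stop instead of parsing a response that never arrived.
        print(f"Site: {SITE_NAME}, request search error: {e}")
        sys.exit(1)

    # Create soup from the HTML body
    return BeautifulSoup(response.text, "html.parser")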

@@ -53,10 +53,12 @@ def title_search(word_to_search: str) -> int:
         console.print("[yellow]The service might be temporarily unavailable or the domain may have changed.[/yellow]")
         sys.exit(1)
 
     # Send request to search for titles
+    search_url = f"{site_constant.FULL_URL}/search/?&q={word_to_search}&quick=1&type=videobox_video&nodes=11"
+    console.print(f"[cyan]Search url: [yellow]{search_url}")
 
     try:
         response = httpx.get(
-            url=f"{site_constant.FULL_URL}/search/?&q={word_to_search}&quick=1&type=videobox_video&nodes=11",
+            url=search_url,
             headers={'user-agent': get_userAgent()},
             timeout=max_timeout
         )

@@ -51,11 +51,12 @@ def title_search(word_to_search: str) -> int:
         console.print("[yellow]The service might be temporarily unavailable or the domain may have changed.[/yellow]")
         sys.exit(1)
 
     # Send request to search for titles
-    print(f"{site_constant.FULL_URL}/?story={word_to_search}&do=search&subaction=search")
+    search_url = f"{site_constant.FULL_URL}/?story={word_to_search}&do=search&subaction=search"
+    console.print(f"[cyan]Search url: [yellow]{search_url}")
 
     try:
         response = httpx.get(
-            url=f"{site_constant.FULL_URL}/?story={word_to_search}&do=search&subaction=search",
+            url=search_url,
             headers={'user-agent': get_userAgent()},
             timeout=max_timeout
         )

@@ -54,10 +54,13 @@ def title_search(title_search: str) -> int:
 
     media_search_manager.clear()
     table_show_manager.clear()
 
+    search_url = f"{site_constant.FULL_URL}/api/search?q={title_search}",
+    console.print(f"[cyan]Search url: [yellow]{search_url}")
+
     try:
         response = httpx.get(
-            url=f"{site_constant.FULL_URL}/api/search?q={title_search.replace(' ', '+')}",
+            url=search_url,
             headers={'user-agent': get_userAgent()},
             timeout=max_timeout
         )
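
Two details are worth noting in this hunk: the removed line URL-encoded spaces by hand with .replace(' ', '+'), and the added search_url line ends with a comma, which in Python makes the value a one-element tuple rather than a string. As a hedged alternative sketch (not what the commit does), the client can be left to encode the query string itself, with placeholder values throughout:

# Alternative sketch: let httpx build and encode the query string.
import httpx

FULL_URL = "https://example.com"   # assumption: stand-in for site_constant.FULL_URL

def api_search(title_search: str) -> httpx.Response:
    # params handles spaces and other reserved characters safely,
    # replacing the manual .replace(' ', '+') from the old line.
    return httpx.get(
        f"{FULL_URL}/api/search",
        params={"q": title_search},
        headers={'user-agent': 'Mozilla/5.0'},
        timeout=10,
    )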

42   update.py

@@ -46,33 +46,37 @@ def move_content(source: str, destination: str):
 
 def keep_specific_items(directory: str, keep_folder: str, keep_file: str):
     """
-    Delete all items in the directory except for the specified folder and file.
+    Deletes all items in the given directory except for the specified folder,
+    the specified file, and the '.git' directory.
 
     Parameters:
         - directory (str): The path to the directory.
         - keep_folder (str): The name of the folder to keep.
        - keep_file (str): The name of the file to keep.
     """
-    try:
-        if not os.path.exists(directory) or not os.path.isdir(directory):
-            raise ValueError(f"Error: '{directory}' is not a valid directory.")
+    if not os.path.exists(directory) or not os.path.isdir(directory):
+        console.print(f"[red]Error: '{directory}' is not a valid directory.")
+        return
 
-        # Iterate through items in the directory
-        for item in os.listdir(directory):
-            item_path = os.path.join(directory, item)
+    # Define folders and files to skip
+    skip_folders = {keep_folder, ".git"}
+    skip_files = {keep_file}
 
-            # Check if the item is the specified folder or file
-            if os.path.isdir(item_path) and item != keep_folder:
+    # Iterate through items in the directory
+    for item in os.listdir(directory):
+        if item in skip_folders or item in skip_files:
+            continue
+
+        item_path = os.path.join(directory, item)
+        try:
+            if os.path.isdir(item_path):
                 shutil.rmtree(item_path)
-
-            elif os.path.isfile(item_path) and item != keep_file:
+                console.log(f"[green]Removed directory: {item_path}")
+            elif os.path.isfile(item_path):
                 os.remove(item_path)
-
-    except PermissionError as pe:
-        console.print(f"[red]PermissionError: {pe}. Check permissions and try again.")
-
-    except Exception as e:
-        console.print(f"[red]Error: {e}")
+                console.log(f"[green]Removed file: {item_path}")
+        except Exception as e:
+            console.log(f"[yellow]Skipping {item_path} due to error: {e}")
 
 
 def print_commit_info(commit_info: dict):
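
As a usage illustration of the reworked cleanup, the skip-set logic keeps the named folder, the named file and '.git' while removing everything else. The sketch below runs the same logic against a throwaway temporary directory so nothing real is deleted; it is not the updater itself:

# Standalone sketch of the skip-set cleanup shown above, run in a temp directory.
import os
import shutil
import tempfile

def keep_specific_items(directory: str, keep_folder: str, keep_file: str) -> None:
    skip_folders = {keep_folder, ".git"}
    skip_files = {keep_file}
    for item in os.listdir(directory):
        if item in skip_folders or item in skip_files:
            continue
        item_path = os.path.join(directory, item)
        try:
            if os.path.isdir(item_path):
                shutil.rmtree(item_path)
            elif os.path.isfile(item_path):
                os.remove(item_path)
        except Exception as e:
            print(f"Skipping {item_path} due to error: {e}")

with tempfile.TemporaryDirectory() as tmp:
    for name in ("Video", ".git", "junk"):
        os.mkdir(os.path.join(tmp, name))
    for name in ("update.py", "old.log"):
        open(os.path.join(tmp, name), "w").close()

    keep_specific_items(tmp, "Video", "update.py")
    print(sorted(os.listdir(tmp)))   # ['.git', 'Video', 'update.py']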

@@ -177,14 +181,14 @@ def main_upload():
     Main function to upload the latest commit of a GitHub repository.
     """
     cmd_insert = Prompt.ask(
-        "[bold red]Are you sure you want to delete all files? (Only 'Video' folder and 'update_version.py' will remain)",
+        "[bold red]Are you sure you want to delete all files? (Only 'Video' folder and 'update.py' will remain)",
         choices=['y', 'n'],
         default='y',
         show_choices=True
     )
 
     if cmd_insert.lower().strip() == 'y' or cmd_insert.lower().strip() == 'yes':
-        console.print("[red]Deleting all files except 'Video' folder and 'update_version.py'...")
+        console.print("[red]Deleting all files except 'Video' folder and 'update.py'...")
         keep_specific_items(".", "Video", "upload.py")
         download_and_extract_latest_commit()
     else:
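
The confirmation gate uses rich's Prompt with an explicit choice list. A minimal sketch of that pattern, with a placeholder message and follow-up actions rather than the updater's exact flow:

# Minimal sketch of a y/n confirmation gate with rich's Prompt; placeholders only.
from rich.console import Console
from rich.prompt import Prompt

console = Console()

answer = Prompt.ask(
    "[bold red]Delete all files except the 'Video' folder and 'update.py'?",
    choices=['y', 'n'],
    default='y',
    show_choices=True
)

if answer.lower().strip() in ('y', 'yes'):
    console.print("[red]Proceeding with cleanup ...")
else:
    console.print("Aborted.")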