Implement autocreation of proxy.json, removing tempout after execution

Signed-off-by: hax <hax@lainlounge.xyz>
This commit is contained in:
h@x 2025-02-09 03:04:49 +00:00
parent b86abb1ddb
commit 9b7f13988d

137
main.py
View file

@ -6,90 +6,118 @@ import time
import sys import sys
import subprocess import subprocess
import json import json
import importlib
import inspect
from concurrent.futures import ThreadPoolExecutor, as_completed
from proxy_provider import ProxyProvider
from proxy_providers import *
from tqdm import tqdm
PROXIES_LIST_URL = "https://nnp.nnchan.ru/mahoproxy.php?u=https://api.sandvpn.com/fetch-free-proxys"
SPEEDTEST_URL = "http://212.183.159.230/5MB.zip" SPEEDTEST_URL = "http://212.183.159.230/5MB.zip"
def fetch_proxies():
    """Fetch the list of proxies from PROXIES_LIST_URL.

    Returns the decoded JSON payload on success, or an empty list when the
    request fails or the body is not valid JSON (the error is printed so a
    batch run keeps going).
    """
    try:
        resp = requests.get(PROXIES_LIST_URL)
        resp.raise_for_status()
        return resp.json()
    except (requests.RequestException, json.JSONDecodeError) as err:
        print(f"Failed to fetch proxies: {err}")
        return []
def is_valid_proxy(proxy):
    """Return True when *proxy* has a host and is not located in Russia
    (YouTube is throttled there)."""
    if proxy.get("host") is None:
        return False
    return proxy.get("country") != "Russia"
def construct_proxy_string(proxy):
    """Build a "[user:pass@]host:port" string from a proxy record.

    Credentials are prepended only when a non-empty username is present.
    """
    base = f'{proxy["host"]}:{proxy["port"]}'
    if proxy.get("username"):
        return f'{proxy["username"]}:{proxy["password"]}@' + base
    return base
def test_proxy(proxy):
    """Time a download of the 5 MB test file through *proxy*.

    Returns the proxy dict augmented with a "time" key (seconds, or inf for
    a too-slow proxy), or None when the proxy is unreachable, errors out, or
    does not serve the expected 5 MB body.
    """
    proxy_str = construct_proxy_string(proxy)
    start_time = time.perf_counter()
    try:
        response = requests.get(
            SPEEDTEST_URL,
            stream=True,
            proxies={"http": f"http://{proxy_str}"},
            timeout=5,
        )
        response.raise_for_status()
        expected = response.headers.get("content-length")
        # Anything but the exact 5 MB payload means a captive portal or a
        # broken proxy, not a real speed sample.
        if expected is None or int(expected) != 5242880:
            return None
        with io.BytesIO() as sink:
            elapsed, _ = download_with_progress(response, sink, expected, start_time)
            return {"time": elapsed, **proxy}
    except requests.RequestException:
        return None
def download_with_progress(response, f, total_length, start_time):
    """Stream *response* into *f*, stopping once enough data is sampled.

    Reads 1 KiB chunks and writes them to *f*.  The download is cut short
    once roughly 20% of the file has arrived (6 of 30 progress ticks), which
    is enough to estimate the proxy's speed.  Past the 10% mark, if the
    measured speed falls below 1.0 in the script's ad-hoc units
    (bytes/s / 100000), the proxy is declared too slow.

    Returns a (elapsed_seconds, downloaded_bytes) tuple — elapsed_seconds is
    rounded to two decimals, or float("inf") for a too-slow proxy.
    """
    downloaded_bytes = 0
    total = int(total_length)
    for chunk in response.iter_content(1024):
        downloaded_bytes += len(chunk)
        f.write(chunk)
        done = int(30 * downloaded_bytes / total)
        if done == 6:
            # Sampled ~20% of the file: enough for a speed estimate.
            break
        # True division, not //: floor-dividing the byte count by the float
        # elapsed time distorted the speed estimate.
        speed = downloaded_bytes / (time.perf_counter() - start_time) / 100000
        if done > 3 and speed < 1.0:
            return float("inf"), downloaded_bytes
    return round(time.perf_counter() - start_time, 2), downloaded_bytes
def save_proxies_to_file(proxies, filename="proxy.json"):
    """Write *proxies* as pretty-printed JSON next to this script.

    Write failures are reported instead of raised so a long proxy-update run
    is not aborted by a read-only or missing location.
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    try:
        with open(path, "w") as f:
            json.dump(proxies, f, indent=4)
    except IOError as e:
        print(f"Failed to save proxies to file: {e}")
def get_best_proxies(providers):
    """Return the top five proxies based on speed from all providers.

    Each provider's proxies are fetched (failures are printed and skipped),
    filtered through is_valid_proxy, speed-tested on a small thread pool
    with a tqdm progress bar, and the five fastest survivors are returned
    sorted by download time.
    """
    candidates = []
    for provider in providers:
        name = provider.__class__.__name__
        try:
            print(f"Fetching proxies from {name}")
            fetched = provider.fetch_proxies()
            candidates += [p for p in fetched if is_valid_proxy(p)]
        except Exception as e:
            print(f"Failed to fetch proxies from {name}: {e}")
    results = []
    with ThreadPoolExecutor(max_workers=2) as pool:
        pending = {pool.submit(test_proxy, p): p for p in candidates}
        progress = tqdm(
            as_completed(pending),
            total=len(pending),
            desc="Testing proxies",
            bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_noinv_fmt}]",
            unit=" proxies",
            unit_scale=True,
            ncols=80,
        )
        for finished in progress:
            outcome = finished.result()
            if outcome is not None:
                results.append(outcome)
    return sorted(results, key=lambda r: r["time"])[:5]
def update_proxies():
    """Discover provider modules, speed-test their proxies, save the best.

    Scans the sibling "proxy_providers" package for modules, instantiates one
    ProxyProvider implementation per module, and writes the five fastest
    proxies to proxy.json.
    """
    providers = []
    providers_dir = os.path.join(os.path.dirname(__file__), "proxy_providers")
    for filename in os.listdir(providers_dir):
        # Only real Python modules, not the package initializer.
        if not filename.endswith(".py") or filename == "__init__.py":
            continue
        module = importlib.import_module(f"proxy_providers.{filename[:-3]}")
        for _name, cls in inspect.getmembers(module, inspect.isclass):
            # Instantiate only genuine provider implementations: previously
            # every class except one literally named "ProxyProvider" was
            # constructed (including unrelated imports), and a module with
            # no provider class raised IndexError.
            if issubclass(cls, ProxyProvider) and cls is not ProxyProvider:
                providers.append(cls())
                break
    best_proxies = get_best_proxies(providers)
    save_proxies_to_file(best_proxies)
    print("All done.")
def run_yt_dlp(): def run_yt_dlp():
"""Run yt-dlp with a randomly selected proxy.""" """Run yt-dlp with a randomly selected proxy."""
while True: while True:
@ -98,22 +126,24 @@ def run_yt_dlp():
proxy = random.choice(json.load(f)) proxy = random.choice(json.load(f))
proxy_str = construct_proxy_string(proxy) proxy_str = construct_proxy_string(proxy)
print(f"Using proxy from {proxy['city']}, {proxy['country']}") print(f"Using proxy from {proxy['city']}, {proxy['country']}")
if execute_yt_dlp_command(proxy_str): if execute_yt_dlp_command(proxy_str):
os.remove("tempout") os.remove("tempout")
break # Exit loop if command was successful break # Exit loop if command was successful
print("Got 'Sign in to confirm' error. Trying again with another proxy...") print("Got 'Sign in to confirm' error. Trying again with another proxy...")
except FileNotFoundError as e: except FileNotFoundError as e:
print("'proxy.json' not found. Starting proxy list update...") print("'proxy.json' not found. Starting proxy list update...")
update_proxies() update_proxies()
def execute_yt_dlp_command(proxy_str):
    """Run yt-dlp through *proxy_str*, mirroring its output to "tempout".

    Returns True when the captured log shows neither a "Sign in to"
    challenge nor a 403 error, i.e. the proxy worked.
    """
    import shlex  # local import: only needed when actually invoking yt-dlp

    # Quote every forwarded argument — URLs containing shell metacharacters
    # (&, ?, spaces) would otherwise break the command line or inject into
    # the shell=True invocation.
    args = " ".join(shlex.quote(str(arg)) for arg in sys.argv)
    command = (
        f"yt-dlp --color always --proxy {shlex.quote(f'http://{proxy_str}')} "
        f"{args} 2>&1 | tee tempout"
    )
    subprocess.run(command, shell=True)
    with open("tempout", "r") as log_fl:
        log_text = log_fl.read()
    return "Sign in to" not in log_text and "403" not in log_text
def main(): def main():
"""Main function to handle script arguments and execute the appropriate command.""" """Main function to handle script arguments and execute the appropriate command."""
@ -121,12 +151,15 @@ def main():
if "update" in sys.argv: if "update" in sys.argv:
update_proxies() update_proxies()
elif len(sys.argv) < 2: elif len(sys.argv) < 2:
print("usage: main.py update | <yt-dlp args> \nScript for starting yt-dlp with best free proxy\nCommands:\n update Update best proxy") print(
"usage: main.py update | <yt-dlp args> \nScript for starting yt-dlp with best free proxy\nCommands:\n update Update best proxy"
)
else: else:
sys.argv.pop(0) sys.argv.pop(0)
run_yt_dlp() run_yt_dlp()
except KeyboardInterrupt: except KeyboardInterrupt:
print("Canceled by user") print("Canceled by user")
if __name__ == "__main__": if __name__ == "__main__":
main() main()