# Proxy-Server / app.py
# lexicalspace — Update app.py (commit 22e098f, verified)
# NOTE: the lines above were raw Hugging Face Spaces page text pasted into the
# file; they have been commented out so the module is valid Python.
import gradio as gr
import requests
import threading
import time
import random
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
# ==============================================================================
# ⚙️ CONFIGURATION (RAM EATER EDITION)
# ==============================================================================
TARGET_COUNT = 200    # Max number of validated proxies kept in the live pool
TIMEOUT_SEC = 3       # ⚡ Speed: kill a proxy check if no response within 3s
CHECK_INTERVAL = 10   # Seconds to sleep between scan cycles
MAX_THREADS = 2000    # 🚀 AGGRESSIVE: parallel validation threads (uses more RAM/CPU)

# Shared, module-level state: the background worker writes it, the Gradio API
# reads it. Whole-value replacement under CPython's GIL keeps reads consistent.
proxy_storage = {
    "valid_proxies": [],               # current working proxies as "ip:port" strings
    "last_updated": "Not yet started"  # timestamp of the last completed scan
}
# ==============================================================================
# 🕵️ PROXY WORKER (Aggressive Multithreading)
# ==============================================================================
def check_proxy(ip):
    """Return *ip* if it works as a fast HTTP proxy, else None.

    Validates by fetching YouTube through the proxy; TIMEOUT_SEC is kept
    short so slow proxies are rejected immediately.

    Args:
        ip: proxy address, presumably "host:port" — TODO confirm format
            matches what the source lists provide.
    """
    proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
    try:
        # Short timeout filters out slow proxies up front.
        r = requests.get("https://www.youtube.com", proxies=proxies, timeout=TIMEOUT_SEC)
        if r.status_code == 200:
            return ip
    except requests.RequestException:
        # Dead/slow/malformed proxy — treat as invalid. (Was a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        pass
    return None
def worker_loop():
    """Background scanner: fetch candidate proxies, validate them in parallel,
    and refresh proxy_storage every CHECK_INTERVAL seconds. Runs forever;
    intended to be started in a daemon thread."""
    # Source URLs are loop-invariant, so they are defined once up front.
    # BUGFIX: the original list was missing commas after two entries, so
    # Python's implicit string concatenation silently fused adjacent URLs
    # into one invalid source; it also listed two sources twice.
    sources = [
        "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
        "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
        "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all",
        "https://raw.githubusercontent.com/shiftytr/proxy-list/master/proxy.txt",
        "https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt",
        "https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt",
        "https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt",
        "https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-http.txt",
    ]
    while True:
        print(f"\n[{datetime.now().strftime('%H:%M:%S')}] ☒️ Starting AGGRESSIVE Scan (Threads: {MAX_THREADS})...")

        # 1. MASSIVE FETCH — download every source list into RAM.
        raw_proxies = []
        print(" πŸ“₯ Downloading huge proxy lists into RAM...")
        for src in sources:
            try:
                r = requests.get(src, timeout=5)
                if r.status_code == 200:
                    # splitlines() + strip() handles \r\n endings and drops
                    # blank lines (the old split("\n") kept both).
                    raw_proxies += [ln.strip() for ln in r.text.splitlines() if ln.strip()]
            except requests.RequestException:
                pass  # best-effort: a dead source simply contributes nothing

        # Deduplicate and shuffle so no single source dominates the scan order.
        raw_proxies = list(set(raw_proxies))
        random.shuffle(raw_proxies)
        print(f" πŸ”₯ RAM Loaded with {len(raw_proxies)} candidates.")

        # 2. RE-VALIDATE EXISTING + CHECK NEW — known-good proxies go first so
        # they are re-confirmed before fresh candidates. Cap at 5000 checks per
        # cycle to avoid freezing the process.
        check_list = (proxy_storage["valid_proxies"] + raw_proxies)[:5000]

        new_valid_pool = []
        print(f" πŸš€ Launching {MAX_THREADS} parallel threads...")
        with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
            future_to_ip = {executor.submit(check_proxy, ip): ip for ip in check_list}
            for future in as_completed(future_to_ip):
                result = future.result()
                if result and result not in new_valid_pool:
                    print(f" βœ… CAPTURED: {result}")
                    new_valid_pool.append(result)
                    # Stop early once we have a small buffer past the target;
                    # cancel_futures drops the still-queued checks (3.9+).
                    if len(new_valid_pool) >= TARGET_COUNT + 5:
                        print(" 🎯 Target hit! Stopping scan early.")
                        executor.shutdown(wait=False, cancel_futures=True)
                        break

        # Publish the refreshed pool (whole-value replacement keeps readers safe).
        proxy_storage["valid_proxies"] = new_valid_pool[:TARGET_COUNT]
        proxy_storage["last_updated"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(f" πŸ’€ Sleeping for {CHECK_INTERVAL}s. Active Pool: {len(proxy_storage['valid_proxies'])}")
        time.sleep(CHECK_INTERVAL)
# Start the scanner in a daemon thread so it dies with the main process
# instead of blocking interpreter shutdown.
threading.Thread(target=worker_loop, daemon=True).start()
# ==============================================================================
# 🔌 API ENDPOINT (FIXED)
# ==============================================================================
def get_proxies_api():
    """API handler: return the live proxy list plus a human-readable status.

    Called by the main app (via the Gradio API) and by the UI widgets.
    """
    status_text = f"Updated: {proxy_storage['last_updated']}"
    return proxy_storage["valid_proxies"], status_text
# Minimal backend dashboard: shows the live pool and exposes the data
# endpoint that the main application polls.
with gr.Blocks() as app:
    gr.Markdown("## 🚦 Proxy Engine (Backend)")
    with gr.Row():
        json_out = gr.JSON(label="Live Proxy Pool")
        status_out = gr.Textbox(label="Last Update")
    refresh_btn = gr.Button("Refresh View")
    # --- THIS IS THE FIX: api_name="get_proxies" exposes this handler on the
    # Gradio REST API so the main app can fetch the pool programmatically. ---
    refresh_btn.click(get_proxies_api, outputs=[json_out, status_out], api_name="get_proxies")
    # Populate the view immediately when the page opens.
    app.load(get_proxies_api, outputs=[json_out, status_out])
app.launch()