Create app.py
app.py
ADDED
@@ -0,0 +1,74 @@
from flask import Flask, render_template
import requests
import time
from apscheduler.schedulers.background import BackgroundScheduler
from concurrent.futures import ThreadPoolExecutor, as_completed

app = Flask(__name__)

# Global list to store working proxies with geolocation data
working_proxies = []

# Function to get the country for a given IP
def get_country(ip):
    try:
        response = requests.get(f"http://ip-api.com/json/{ip}")
        data = response.json()
        return data.get("country", "Unknown")
    except Exception:
        return "Unknown"

# Function to check if a proxy is working and fetch its country
def check_proxy(proxy):
    start_time = time.perf_counter()
    try:
        with requests.Session() as session:
            response = session.head("https://www.google.com/", proxies={'http': proxy, 'https': proxy}, timeout=1)  # Reduced timeout to 1 second
            if response.status_code == 200:
                elapsed_time = time.perf_counter() - start_time
                ip = proxy.split(':')[0]
                country = get_country(ip)
                return proxy, elapsed_time, country
    except Exception:
        return None

# Function to fetch and validate proxies
def fetch_and_check_proxies():
    global working_proxies
    try:
        print("Fetching proxies...")
        resp = requests.get("https://api.proxyscrape.com/v2/?request=displayproxies&protocol=http&timeout=10000&country=all&ssl=all&anonymity=all")
        proxies = [proxy.strip() for proxy in resp.text.strip().split("\n") if proxy.strip()]

        print(f"Found {len(proxies)} proxies. Checking their validity...")

        temp_working_proxies = []
        with ThreadPoolExecutor(max_workers=200) as executor:
            futures = {executor.submit(check_proxy, proxy): proxy for proxy in proxies}

            for future in as_completed(futures):
                result = future.result()
                if result:
                    temp_working_proxies.append(result)

        # Sort proxies by response time and update the global list
        temp_working_proxies.sort(key=lambda x: x[1])
        working_proxies = temp_working_proxies

        print(f"Found {len(working_proxies)} working proxies.")

    except Exception as e:
        print(f"Error occurred: {e}")

# Schedule the proxy fetching every 10 minutes (can be adjusted as needed)
scheduler = BackgroundScheduler()
scheduler.add_job(func=fetch_and_check_proxies, trigger="interval", minutes=10)
scheduler.start()

# Route to display the working proxies
@app.route('/')
def home():
    return render_template('index.html', proxies=working_proxies)

if __name__ == "__main__":
    app.run(host='0.0.0.0', port=5000)
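
Two things worth noting about this file as committed. The home route renders templates/index.html, which is not part of this diff, so the Space needs that template alongside app.py. And because the proxy sweep is only registered on a 10-minute interval and is never called at startup, working_proxies stays empty until the first scheduled run fires. Below is a minimal sketch of one way to warm the list up at launch; the warm_up helper is hypothetical and not part of the committed app.py.

# Hypothetical addition, not in the committed app.py: run one proxy sweep
# in a background thread at startup so '/' has data before the first
# scheduled run (roughly 10 minutes after boot).
import threading

def warm_up():
    # Reuses fetch_and_check_proxies() defined above; a daemon thread keeps
    # Flask free to start serving while the sweep runs.
    threading.Thread(target=fetch_and_check_proxies, daemon=True).start()

if __name__ == "__main__":
    warm_up()
    app.run(host='0.0.0.0', port=5000)

Alternatively, APScheduler's add_job accepts a next_run_time argument, so passing next_run_time=datetime.now() when registering the job should trigger an immediate first run without the extra thread.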