HTTP/S simple Python script - find and validate proxies
PerfeoDexp
The script scrapes the sites below, tests the proxies it finds, and saves the working ones to valid_proxies.txt
Python:
import requests
import re
import threading
def validate_proxy(proxy):
    try:
        # test the proxy by fetching wikipedia.org through it
        response = requests.get("http://www.wikipedia.org", proxies={"http": proxy, "https": proxy}, timeout=5)
        if response.status_code == 200:
            # working proxy, append it to the output file
            with open("valid_proxies.txt", "a") as file:
                file.write(proxy + "\n")
    except requests.RequestException:
        pass

def get_proxies_from_website(website_url):
    # scrape the page and pull out everything that looks like ip:port
    try:
        response = requests.get(website_url, timeout=10)
    except requests.RequestException:
        return []
    return re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}", response.text)
websites = [
    "https://free-proxy-list.net/",
    "https://www.sslproxies.org/",
    "https://proxy-list.download/",
    "https://hidemyna.me/en/proxy-list/",
    "https://spys.one/en/free-proxy-list/",
    "https://xroxy.com/proxylist.php",
    "http://samair.ru/proxy/",
    "https://coderseek.com/free-proxy-list/",
    "https://cool-proxy.net/proxies",
    "https://my-proxy.com/free_proxy_list.html",
    "https://proxyserverlist-24.top/",
    "https://proxylisfbi.gov/",
    "https://nntime.com/proxy-list/",
    "http://www.gatherproxy.com/",
    "https://proxies.ml/",
    "https://freeopenproxy.com/",
    "https://socksproxylist.net/",
    "https://proxylists.net/",
    "https://free-proxy-list.net/uk-proxy.html",
    "https://us-proxy.org/",
    "https://proxyserverlist.net/",
    "https://www.us-proxy.org/",
    "https://www.socks-proxy.net/",
    "https://hidemy.name/en/proxy-list/",
    "https://www.proxynova.com/proxy-server-list/",
    "https://www.proxy-list.download/SOCKS5",
    "https://www.proxy-list.download/HTTP",
    "https://www.proxy-list.download/HTTPS",
    "https://www.proxy-list.download/SOCKS4",
]
for website in websites:
    proxies = get_proxies_from_website(website)
    threads = [threading.Thread(target=validate_proxy, args=(proxy,)) for proxy in proxies]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

print("Validation of all proxies is complete.")