Multi-process crawling of Xicidaili proxies
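This script scrapes the first page of xicidaili.com's high-anonymity proxy list, then checks each proxy by fetching a Baidu search page through it. A producer process feeds candidate proxies into a multiprocessing.Queue while a Pool of worker processes validates them concurrently; the timing comments at the top record the measured elapsed time for pool sizes of 5, 10, 20, and 50.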
import requests
from lxml import etree
import time
import multiprocessing
from queue import Empty

# Measured elapsed time vs. pool size:
#    5 workers: 84.26855897903442 s
#   10 workers: 44.181687355041504 s
#   20 workers: 29.013262033462524 s
#   50 workers: 22.825448036193848 s
def get_all_proxy(queue):
    # Scrape page 1 of xicidaili's high-anonymity list and push each proxy onto the queue.
    url = 'http://www.xicidaili.com/nn/1'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
    }
    response = requests.get(url, headers=headers)
    html_ele = etree.HTML(response.text)
    # Column 2 of the ip_list table holds the IP, column 3 the port.
    ip_eles = html_ele.xpath('//table[@id="ip_list"]/tr/td[2]/text()')
    port_ele = html_ele.xpath('//table[@id="ip_list"]/tr/td[3]/text()')
    for i in range(len(ip_eles)):
        proxy_str = 'http://' + ip_eles[i] + ':' + port_ele[i]
        queue.put(proxy_str)
def check_one_proxy(proxy):
    # Return the proxy if it can fetch a Baidu search page within 5 s, otherwise None.
    url = 'http://www.baidu.com/s?wd=ip'
    proxy_dict = {
        'http': proxy
    }
    try:
        response = requests.get(url, proxies=proxy_dict, timeout=5)
        if response.status_code == 200:
            print('usable proxy: ' + proxy)
            return proxy
        # A non-200 status means the proxy did not work. (The original returned
        # the proxy here too, which let dead proxies into the result list.)
        print('bad proxy: ' + proxy)
        return None
    except Exception:
        # Timeouts and connection errors also disqualify the proxy.
        return None
# Serial version, kept for comparison with the pooled check below:
# def check_all_proxy(proxy_list):
#     valid_proxy_list = []
#     for proxy in proxy_list:
#         if check_one_proxy(proxy):
#             valid_proxy_list.append(proxy)
#     return valid_proxy_list
if __name__ == '__main__':
    start_time = time.time()
    # Producer/consumer: one process fills the queue, the pool drains it.
    # Note: a plain multiprocessing.Queue cannot be shared with Pool workers;
    # results come back through apply_async instead (a Manager().Queue() would
    # be the alternative).
    q = multiprocessing.Queue()
    p = multiprocessing.Process(target=get_all_proxy, args=(q,))
    p.start()
    # Check proxy usability with a pool of 50 worker processes.
    pool = multiprocessing.Pool(50)
    result_list = []
    while True:
        try:
            proxy_str = q.get(timeout=5)
        except Empty:
            # No new proxy for 5 s: assume the producer is finished.
            break
        proxy_res = pool.apply_async(check_one_proxy, (proxy_str,))
        result_list.append(proxy_res)
    # Block on each AsyncResult and keep only the proxies that passed.
    valid_proxy_list = []
    for proxy_res in result_list:
        result = proxy_res.get()
        if result is not None:
            valid_proxy_list.append(result)
    print('All proxies we could get:')
    print(valid_proxy_list)
    pool.close()
    pool.join()
    p.join()
    end_time = time.time()
    print('--' * 30)
    print('elapsed: ' + str(end_time - start_time))
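Once the run finishes, the proxies in valid_proxy_list can be rotated for later requests. A minimal sketch, assuming the list is non-empty (fetch_via_proxy and the example URL are illustrative, not part of the script above):

import random
import requests

def fetch_via_proxy(url, proxy_pool):
    # Hypothetical helper: pick one validated proxy at random per request.
    proxy = random.choice(proxy_pool)
    return requests.get(url, proxies={'http': proxy}, timeout=5)

# e.g. response = fetch_via_proxy('http://www.baidu.com', valid_proxy_list)

Since checking proxies is network-bound rather than CPU-bound, a thread pool (multiprocessing.dummy.Pool or concurrent.futures.ThreadPoolExecutor) would likely reach similar speedups with less per-worker overhead than 50 full processes.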