Python 3: crawl the first 2 pages of domestic high-anonymity proxy IPs from Xici Proxy (西刺代理) and verify that they work; if Xici cannot be reached, fall back to Kuaidaili (快代理) and save the IPs to a spreadsheet.
Imported ExcelUtil module: https://www.cdsy.xyz/computer/programme/Python/241209/cd64893.html
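The script only relies on two ExcelUtil methods: dict_data(), which reads each data row back as a dict keyed by the header row, and write_excel(row, col, value, file_path), which updates one cell and saves the file. A minimal openpyxl-based sketch of that assumed interface (inferred from the calls below, not taken from the linked module) could look roughly like this:

# Hypothetical stand-in for process_excel.ExcelUtil, inferred from how the script below uses it
from openpyxl import load_workbook

class ExcelUtil(object):
    def __init__(self, file_path):
        self.wb = load_workbook(file_path)
        self.ws = self.wb.active
    def dict_data(self):
        # Each data row becomes a dict keyed by the header row
        rows = list(self.ws.values)
        headers = rows[0]
        return [dict(zip(headers, row)) for row in rows[1:]]
    def write_excel(self, row, col, value, file_path):
        # Write a single cell and save the workbook back to disk
        self.ws.cell(row=row, column=col, value=value)
        self.wb.save(file_path)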
#!/usr/bin/env python
# coding=utf-8
# Crawl the first 2 pages of domestic high-anonymity proxy IPs from Xici Proxy and verify them
# If Xici cannot be reached, fall back to Kuaidaili
# https://www.xicidaili.com/nn/1
import urllib.request
from lxml import etree
import time,random,os
from openpyxl import Workbook
from process_excel import ExcelUtil
import requests
# Proxy fetcher
class GetProxy(object):
def __init__(self):
self.USER_AGENT_LIST = [
'MSIE (MSIE 6.0; X11; Linux; i686) Opera 7.23',
'Opera/9.20 (Macintosh; Intel Mac OS X; U; en)',
'Opera/9.0 (Macintosh; PPC Mac OS X; U; en)',
'iTunes/9.0.3 (Macintosh; U; Intel Mac OS X 10_6_2; en-ca)',
'Mozilla/4.76 [en_jp] (X11; U; SunOS 5.8 sun4u)',
'iTunes/4.2 (Macintosh; U; PPC Mac OS X 10.2)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:5.0) Gecko/20100101 Firefox/5.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:9.0) Gecko/20100101 Firefox/9.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) Gecko/20120813 Firefox/16.0',
'Mozilla/4.77 [en] (X11; I; IRIX;64 6.5 IP30)',
'Mozilla/4.8 [en] (X11; U; SunOS; 5.7 sun4u)',
            'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36'
]
        # All fetched IPs, each row holding the proxy, its verification time, etc.; written to the spreadsheet later
self.list_all_ip = []
self.wb = Workbook()
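        # Header row: index, proxy IP, last verification time, proxies-dict string used for validation, valid or not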
self.data = ['序号','代理IP','最后验证时间','验证使用字段','是否有效']
    # Fetch proxy IPs
def getProxy(self):
print("获取代理IP数据中......")
for page in range(1,3):
time.sleep(2)
try:
print("尝试从西刺代理获取...")
headers = {
"Host": "www.xicidaili.com",
"User-Agent": random.choice(self.USER_AGENT_LIST),
}
url = "https://www.xicidaili.com/nn/"
request = urllib.request.Request(url+str(page),headers=headers)
response = urllib.request.urlopen(request)
html_ = response.read().decode('utf-8')
# with open("xici-" + str(page)+".html","w",encoding="utf-8")as f:
# f.write(html_)
html = etree.HTML(html_)
                # IP address, e.g. 116.209.56.240
ip_add = html.xpath('//*[@id="ip_list"]//tr/td[2]/text()')
                # Port, e.g. 9999
port = html.xpath('//*[@id="ip_list"]//tr/td[3]/text()')
                # Type, e.g. HTTP
ip_type = html.xpath('//*[@id="ip_list"]//tr/td[6]/text()')
                # Alive time (unused)
# live_time = html.xpath('//*[@id="ip_list"]//tr/td[9]/text()')
                # Last verification time
verify_time = html.xpath('//*[@id="ip_list"]//tr/td[10]/text()')
except:
print("西刺代理获取失败,使用快代理获取IP...")
headers = {
"Host": "www.kuaidaili.com",
"User-Agent": random.choice(self.USER_AGENT_LIST),
}
url = "https://www.kuaidaili.com/free/inha/"
request = urllib.request.Request(url + str(page), headers=headers)
response = urllib.request.urlopen(request)
html_ = response.read().decode('utf-8')
# with open("kuaidai-" + str(page)+".html","w",encoding="utf-8")as f:
# f.write(html_)
html = etree.HTML(html_)
                # IP address, e.g. 116.209.56.240
ip_add = html.xpath('//*[@id="list"]//tbody/tr/td[1]/text()')
                # Port, e.g. 9999
port = html.xpath('//*[@id="list"]//tbody/tr/td[2]/text()')
                # Type, e.g. HTTP
ip_type = html.xpath('//*[@id="list"]//tbody/tr/td[4]/text()')
                # Last verification time
verify_time = html.xpath('//*[@id="list"]//tbody/tr/td[7]/text()')
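            # ip_add / port / ip_type / verify_time are parallel lists; the loop below zips them into one row per proxy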
length = len(ip_add)
for i in range(length):
                # urllib.request.ProxyHandler() expects a dict-style argument, e.g.:
                # {"https": "https://94.142.27.4:3128"}
ip_dic = {}
list_ip = []
time.sleep(0.02)
# print("\r获取代理IP数据进度-{:.2f}%".format((i+1)*100/length),end="")
# print("获取代理IP数据进度:第%d页(%d/%d)"%(page,(i+1),length))
                # Assembled proxy URL, e.g. https://94.142.27.4:3128
group_ip_add = ip_type[i].lower() + '://' + ip_add[i] + ':' + port[i]
ip_dic[ip_type[i].lower()] = group_ip_add
list_ip.append(group_ip_add)
list_ip.append(verify_time[i])
list_ip.append(str(ip_dic))
list_ip.append("是")
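                # list_ip now holds [proxy URL, verification time, proxies-dict string, '是']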
self.list_all_ip.append(list_ip)
print(">>>>获取完毕。")
        # Write to the spreadsheet
self.process_excel(self.list_all_ip)
    # Validate the proxies read back from the spreadsheet
def isActiveProxy(self,data,filePath):
print("验证代理IP数据中......")
length = len(data)
        # Count of working proxies
count = 0
for i in range(length):
            # For testing without a proxy
# proxy = urllib.request.ProxyHandler({})
proxy_ip = {}
try:
                # Convert the stored str back into a dict
# print("尝试验证第%d个代理IP中..."%(i+1))
# print("\r验证代理IP数据进度:{:.2f}%".format((i + 1) * 100 / length), end="")
proxy_ip = eval(data[i]['验证使用字段'])
                # Method 1: verify through requests with a proxies dict
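                # proxies=proxy_ip routes the probe request through the candidate proxy; timeout=5 keeps a dead proxy from stalling the loop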
response = requests.get(url= 'http://www.baidu.com/', headers= {'User-Agent':random.choice(self.USER_AGENT_LIST)},proxies=proxy_ip, timeout=5)
if response.status_code == 200:
ExcelUtil(filePath).write_excel(i + 2, 5, '有效', filePath)
count += 1
                # Method 2 (urllib with ProxyHandler) does not seem to work reliably; it hangs partway through
# proxy = urllib.request.ProxyHandler(proxy_ip)
# opener = urllib.request.build_opener(proxy)
# opener.addheaders = [("User-Agent",random.choice(self.USER_AGENT_LIST))]
                # Written this way, the opener is installed globally: every later request,
                # whether sent via opener.open() or urlopen(), goes through the custom proxy.
# urllib.request.install_opener(opener)
# request = urllib.request.Request('http://www.baidu.com/')
# response = urllib.request.urlopen(request)
# if response.code == 200:
# ExcelUtil(filePath).write_excel(i + 2, 5, '有效', filePath)
# count += 1
except Exception as e:
print("失败IP:",proxy_ip)
ExcelUtil(filePath).write_excel(i + 2, 5, '否', filePath)
print("\n验证完毕。")
return count
    # Spreadsheet handling
def process_excel(self,list_all_ip=None):
print("将获取的IP写入表格中...")
ws = self.wb.active
ws.title = "代理IP列表"
        # Freeze the header row
ws.freeze_panes = 'A2'
ws.column_dimensions['A'].width = 7
ws.column_dimensions['B'].width = 31
ws.column_dimensions['C'].width = 19
ws.column_dimensions['D'].width = 42
        # Write the header row
for head in range(1, len(self.data) + 1):
_ = ws.cell(row=1, column=head, value=self.data[head - 1])
for i in range(len(list_all_ip)):
_ = ws.cell(row=i+2, column=1, value=i+1)
for j in range(len(list_all_ip[i])):
_ = ws.cell(row=i+2, column=j+2, value=list_all_ip[i][j])
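        # self.path is set in switch_ip() before this method runs; the '\\' separator assumes Windows (os.path.join would be portable)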
self.wb.save(self.path + '\\代理IP.xlsx')
    # Check whether the proxy spreadsheet already exists
def switch_ip(self):
        # Counter to avoid refetching endlessly; stop after three attempts
get_num = 0
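        # The spreadsheet lives in the parent directory of the current working directory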
self.path = os.path.dirname(os.getcwd())
while True:
            # List all files under self.path (the parent of the current working directory)
dirlist = os.listdir(self.path)
if get_num < 3:
if '代理IP.xlsx' in dirlist:
print("本地有表格。")
                    # If it exists, validate its entries
data = ExcelUtil(self.path+"\\代理IP.xlsx").dict_data()
                    # Return the data if at least 6 proxies are valid
count = self.isActiveProxy(data,self.path+"\\代理IP.xlsx")
if count >= 6:
return data
else:
get_num += 1
print("有效IP个数:%d小于6,重新获取:第%d次"%(count,get_num))
                        # Fetch again
self.getProxy()
else:
print("无表格,需要获取...")
self.getProxy()
# self.process_excel()
get_num += 1
else:
print("ERROR:同一时间获取IP次数超过三次,请稍后再试!")
return False
if __name__ == "__main__":
get = GetProxy()
get.switch_ip()
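switch_ip() returns the rows read back by ExcelUtil.dict_data() (or False once three attempts have been used up). A small follow-up sketch, reusing the same eval trick as isActiveProxy and the imports already at the top of the script, shows how one of those rows could be fed back into requests as a proxy:

# Hedged usage sketch: pick one row returned by switch_ip() and reuse its stored proxies dict
get = GetProxy()
data = get.switch_ip()
if data:
    row = random.choice(data)
    proxies = eval(row['验证使用字段'])  # e.g. {'https': 'https://94.142.27.4:3128'}
    resp = requests.get('http://www.baidu.com/', proxies=proxies, timeout=5)
    print(resp.status_code, proxies)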