Python web crawler in practice: scraping Xici (西刺) free proxy IPs in real time
The program below is adapted from example code shared by experienced authors online. It uses multithreading and targets Python 2.7: one group of threads fetches proxy entries from the Xici listing pages, a second group validates them against a test URL, and the survivors are written to a file sorted by response time. A quick illustration of the parsing regex follows, and then the full script.
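Before the full listing, here is a minimal sketch of what the parsing regex extracts. The HTML fragment is a hand-written approximation of one row of the Xici proxy table (an assumption, not a copy of the live page); it only illustrates the three capture groups: IP, port, and protocol.

```python
# -*- coding: utf-8 -*-
# Illustration only: the sample row is an assumed approximation of the Xici
# table markup, used to show what the three capture groups return.
import re

p = re.compile(r'''<tr class=".+?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>.+?(\d{2,4})</td>.+?<td>(.{4,5})</td>''', re.DOTALL)

sample_row = '''<tr class="odd">
  <td class="country"><img src="flag.png"/></td>
  <td>121.40.108.76</td>
  <td>80</td>
  <td><a href="/area">Zhejiang</a></td>
  <td>HTTP</td>
</tr>'''

print p.findall(sample_row)   # [('121.40.108.76', '80', 'HTTP')]
```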
```python
# -*- coding: utf-8 -*-
import urllib2
import re
import threading
import time

rawProxyList = []
checkedProxyList = []

# Target pages of the proxy site (pages 1-5 of the high-anonymity list)
targets = []
for i in range(1, 6):
    target = r"http://www.xici.net.co/nn/%d" % i
    targets.append(target)
# print targets

# Regex: capture IP, port and protocol from each table row
p = re.compile(r'''<tr class=".+?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>.+?(\d{2,4})</td>.+?<td>(.{4,5})</td>''', re.DOTALL)


# Thread class that fetches proxies from one target page
class ProxyGet(threading.Thread):
    def __init__(self, target):
        threading.Thread.__init__(self)
        self.target = target

    def getProxy(self):
        print "Target page: " + self.target
        req = urllib2.urlopen(self.target)
        result = req.read()
        matchs = p.findall(result)
        for row in matchs:
            ip = row[0]
            port = row[1]
            agent = row[2]          # protocol type, e.g. HTTP / HTTPS
            addr = agent + '://' + ip + ':' + port
            proxy = [ip, port, addr]
            rawProxyList.append(proxy)

    def run(self):
        self.getProxy()


# Thread class that validates one slice of the fetched proxies
class ProxyCheck(threading.Thread):
    def __init__(self, proxyList):
        threading.Thread.__init__(self)
        self.proxyList = proxyList
        self.timeout = 5
        self.testUrl = "http://www.baidu.com/"
        self.testStr = "030173"

    def checkProxy(self):
        cookies = urllib2.HTTPCookieProcessor()
        for proxy in self.proxyList:
            proxyHandler = urllib2.ProxyHandler({"http": r'http://%s:%s' % (proxy[0], proxy[1])})
            opener = urllib2.build_opener(cookies, proxyHandler)
            opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')]
            t1 = time.time()
            try:
                req = opener.open(self.testUrl, timeout=self.timeout)
                result = req.read()
                timeused = time.time() - t1
                pos = result.find(self.testStr)
                if pos > 1:
                    checkedProxyList.append((proxy[0], proxy[1], proxy[2], timeused))
                else:
                    continue
            except Exception, e:
                continue

    def run(self):
        self.checkProxy()


if __name__ == "__main__":
    getThreads = []
    checkThreads = []

    # One thread per target page to fetch proxies
    for i in range(len(targets)):
        t = ProxyGet(targets[i])
        getThreads.append(t)

    for i in range(len(getThreads)):
        getThreads[i].start()

    for i in range(len(getThreads)):
        getThreads[i].join()

    print '.' * 10 + "Fetched %s proxies in total" % len(rawProxyList) + '.' * 10

    # 20 validation threads: split the fetched proxies into 20 slices, one per thread
    for i in range(20):
        t = ProxyCheck(rawProxyList[((len(rawProxyList) + 19) / 20) * i:((len(rawProxyList) + 19) / 20) * (i + 1)])
        checkThreads.append(t)

    for i in range(len(checkThreads)):
        checkThreads[i].start()

    for i in range(len(checkThreads)):
        checkThreads[i].join()

    print '.' * 10 + "%s proxies passed validation" % len(checkedProxyList) + '.' * 10

    # Persist the validated proxies, fastest first
    f = open("proxy_list.txt", 'w+')
    for proxy in sorted(checkedProxyList, cmp=lambda x, y: cmp(x[3], y[3])):
        print "checked proxy is: %s:%s\t%s\t%s" % (proxy[0], proxy[1], proxy[2], proxy[3])
        f.write("%s:%s\t%s\t%s\n" % (proxy[0], proxy[1], proxy[2], proxy[3]))
    f.close()
```
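To close the loop, here is a minimal sketch of how one of the validated proxies could be reused. It assumes proxy_list.txt was produced by the script above (one "ip:port &lt;TAB&gt; protocol://ip:port &lt;TAB&gt; latency" line per proxy, fastest first); the target URL is only an example.

```python
# -*- coding: utf-8 -*-
# Minimal sketch: reuse the fastest validated proxy from proxy_list.txt.
# Assumes the file format written by the script above:
#   ip:port \t protocol://ip:port \t latency   (sorted fastest first)
import urllib2

with open("proxy_list.txt") as f:
    fastest = f.readline().strip().split("\t")[0]   # e.g. "1.2.3.4:8080"

opener = urllib2.build_opener(urllib2.ProxyHandler({"http": "http://" + fastest}))
resp = opener.open("http://www.baidu.com/", timeout=5)   # example target only
print resp.getcode(), "via", fastest
```

If every subsequent urlopen call should go through the proxy, the same opener could also be installed globally with urllib2.install_opener(opener).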