一、需求背景
网站刷票时,经常会遇到一个ip只能投票一次的限制,为此需要使用代理ip
二、脚本如下:
1、Proxy_http.py使用代理ip发送http的get和post请求
#coding:utf-8 import urllib2,urllib,time,socket,random,Proxy_ip,Useragent def Visitpage(proxyip,url): socket.setdefaulttimeout(6) proxy_support = urllib2.ProxyHandler({'http':proxyip}) user_agent = random.choice(Useragent.user_agents) opener = urllib2.build_opener(proxy_support,urllib2.HTTPHandler) urllib2.install_opener(opener) try: request = urllib2.Request(url) request.add_header('Referer','http://www.baidu.com') request.add_header('User-Agent',user_agent) html = urllib2.urlopen(request).read() print html time.sleep(random.randint(60,180)) except urllib2.URLError,e: print 'URLError! The bad proxy is %s' %proxyip except urllib2.HTTPError,e: print 'HTTPError! The bad proxy is %s' %proxyip except: print 'Unknown Errors! The bad proxy is %s ' %proxyip def Clicklikebutton(proxyip,url,data): socket.setdefaulttimeout(6) proxy_support = urllib2.ProxyHandler({'http':proxyip}) user_agent = random.choice(Useragent.user_agents) opener = urllib2.build_opener(proxy_support,urllib2.HTTPHandler) try: request = urllib2.Request(url) request.add_header('Referer','http://www.baidu.com') request.add_header('User-Agent',user_agent) data = urllib.urlencode(data) resp = opener.open(request, data) print resp.read() time.sleep(random.randint(60,180)) except urllib2.URLError,e: print 'URLError! The bad proxy is %s' %proxyip except urllib2.HTTPError,e: print 'HTTPError! The bad proxy is %s' %proxyip except: print 'Unknown Errors! The bad proxy is %s ' %proxyip def main(): for i in range(len(Proxy_ip.iplist)): proxyip = Proxy_ip.iplist[i] i += 1 print proxyip for m in range(random.randint(2,4)): Clicklikebutton(proxyip,'你的post请求地址',{你的post请求参数}) if __name__ == "__main__": main()
2、Useragent.py文件为agent库
#!/usr/bin/python
#-*- coding:utf-8 -*-
# Useragent.py -- pool of real-browser User-Agent strings; Proxy_http
# picks one at random per request so traffic does not all carry the
# same UA.
#
# BUGFIX: the original list had NO commas between the string literals,
# so Python's implicit adjacent-string concatenation collapsed all 14
# entries into ONE giant string and random.choice() always returned
# that single bogus value.  The MSIE 9.0 entry was also missing its
# closing parenthesis.
user_agents = [
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
    'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
    'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
    'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',
    'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.75 Safari/537.36',
]
3、Proxy_ip.py为读取代理ip文件
#coding:utf-8
# Proxy_ip.py -- load the proxy list (one "ip:port" per line) into iplist,
# which Proxy_http.main() iterates over.

# Path to the text file holding the proxies.  TODO: make configurable.
PROXY_FILE = "E:\\monkey_test\\http\\1222-1.txt"

iplist = []
# BUGFIX: use open() instead of the deprecated file() constructor, and a
# context manager so the handle is closed instead of leaking.  Iterating
# the file object line by line also avoids readlines() materializing the
# whole file in memory.
with open(PROXY_FILE, "r") as datafile:
    for line in datafile:
        iplist.append(line.strip('\n'))
4、代理ip的获取
可在以下网站获取http://www.xicidaili.com/(可自行开发脚本爬取网站的ip,容易被封,脚本在此不提供)
也可以购买http://www.daxiangdaili.com/