分享一个获取代理ip的python函数

#coding:utf-8
from bs4 import BeautifulSoup
import requests
import random
 
def getproxyip(proxy_url='https://www.ip-adress.com/proxy-list'):
    """Fetch a proxy-list page and return one proxy IP chosen at random.

    Parameters:
        proxy_url: URL of the proxy-list page to scrape. The original
            snippet referenced an undefined global `proxy_url`; it is now
            a parameter so callers can supply their own list page.
            NOTE(review): default guessed from the hard-coded Host header
            (www.ip-adress.com) — confirm the exact list URL.

    Returns:
        The text of the first <td> in a randomly picked <tr class="yellow">
        row (one of the first 4 matching rows).

    Raises:
        requests.RequestException on network failure; IndexError if fewer
        than the sampled number of rows are found.
    """
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Host': 'www.ip-adress.com',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0'
    }
    proxy_req = requests.get(proxy_url, headers=headers)
    # Explicit parser avoids bs4's "no parser specified" warning and keeps
    # results consistent across machines.
    proxy_soup = BeautifulSoup(proxy_req.text, 'html.parser')
    proxy_ip = proxy_soup.find_all('tr', {'class': 'yellow'})
    return proxy_ip[random.randrange(0, 4)].td.text

python编写的自动获取代理IP列表的爬虫

#!/usr/bin/env python
# coding: utf-8
import os
import urllib
from bs4 import BeautifulSoup

def log():
    """Append the current value of the global `ip` string to the log file.

    Side effects: appends to f:\\daili.txt (file is created if missing).
    """
    # Raw string: "f:\daili.txt" only worked because \d is not a recognized
    # escape; r"..." makes the backslash explicit and safe.
    # `with` guarantees the handle is closed even if the write raises.
    with open(r"f:\daili.txt", 'a') as f:
        f.write(ip)

def fenxi():
    page = urllib.urlopen(url)
    data = page.read()
    soup=BeautifulSoup(data)
    #print soup
    list=soup.find_all('span')
    for i in list:
        #print i.get_text()
        global ip
        ip= i.get_text()
        s="\n".join(ip.split('#'))
        print s
        log()  
for i in range(1,10):
    if i==1:
        url = 'http://www.youdaili.cn/Daili/http/556.html'
        print url
        fenxi()
    else:
        url = 'http://www.youdaili.cn/Daili/http/556_'+str(i)+'.html'
        print url
        fenxi()
        
运行结果:
||||||||IP知识库最新代理IPwww.Youdaili.Cn07-09免费代理国内外http代理ip地址1.186.200.211:8080@HTTP#印度

2.135.238.92:9090@HTTP#哈萨克斯坦

2.181.177.7:8080@HTTP#伊朗

2.183.155.2:8082@HTTP#伊朗

上一篇:用 Python 制作一个艺术签名小工具,给自己设计一个优雅的签名


下一篇:scrapy爬虫学习系列五:图片的抓取和下载