"""
要导入fake-useragent库,需要先用pip安装,安装命令:pip install fake-useragent
params是爬虫伪装的参数,数据类型为字典dict,里面有2个键值对,2个键:headers、proxies。
headers的数据类型是字典,里面有1个键值对,键User-Agent对应的值数据类型为字符串,User-Agent中文翻译是用户代理。
proxies的数据类型是字典,里面有1个键值对,键http对应的值数据类型为字符串,是代理服务器的url。
匿名ip主要是从66ip.cn网站获取。
"""
import requests
from bs4 import BeautifulSoup as bs
from fake_useragent import UserAgent
import random
def getSoup(url, encoding="utf-8", **params):
    """Fetch *url* and return the page parsed as BeautifulSoup.

    Args:
        url: page to download.
        encoding: charset used to decode the response body.
        **params: extra keyword arguments forwarded to requests.get,
            e.g. headers=..., proxies=... (see getParams).

    Returns:
        bs4.BeautifulSoup of the page, parsed with the lxml parser.
    """
    print(params)  # NOTE(review): debug output; consider the logging module
    # requests has no default timeout; without one a dead proxy hangs the
    # script forever. setdefault keeps any caller-supplied timeout intact.
    params.setdefault("timeout", 10)
    response = requests.get(url, **params)  # fixed typo: was "reponse"
    response.encoding = encoding
    soup = bs(response.text, 'lxml')
    return soup
def cssFind(movie, cssSelector, nth=1):
    """Return the stripped text of the nth element matching a CSS selector.

    Args:
        movie: a BeautifulSoup tag (anything with a .select() method).
        cssSelector: CSS selector string.
        nth: 1-based index of the match to return.

    Returns:
        Stripped .text of the nth match, or '' when there are fewer
        than nth matches.
    """
    # Run the selector once; the original called .select() twice
    # (once for the length check, once for the lookup).
    matches = movie.select(cssSelector)
    if len(matches) >= nth:
        return matches[nth - 1].text.strip()
    return ''
def getProxyList(page=None):
    """Scrape anonymous HTTP proxies from www.66ip.cn.

    Args:
        page: page number of the areaindex_2 listing to scrape; when
            None (the default, preserving the original behavior) a
            random page in 1..10 is chosen.

    Returns:
        list[str]: proxy URLs of the form "http://ip:port".
    """
    if page is None:
        page = random.randint(1, 10)
    proxies_url_before = "http://www.66ip.cn/areaindex_2/{}.html"
    proxies_url = proxies_url_before.format(page)
    soup = getSoup(proxies_url)
    # First two table rows are header rows; data rows follow.
    item_list = soup.select("table tr")[2:]
    proxies_list = []
    for item in item_list:
        ipAddress = cssFind(item, "td")
        ipPort = cssFind(item, "td", 2)
        # Skip malformed rows: cssFind returns '' for missing cells,
        # which would otherwise yield useless "http://:" entries.
        if ipAddress and ipPort:
            proxies_list.append("http://{}:{}".format(ipAddress, ipPort))
    return proxies_list
def getParams():
    """Build disguise parameters for requests.get.

    Returns:
        dict with two keys:
            'headers' — {'User-Agent': <random UA string>}
            'proxies' — {'http': <random proxy url from 66ip.cn>}
        Ready to be splatted into requests.get(url, **params).
    """
    user_agent = UserAgent()
    proxy_pool = getProxyList()
    return {
        'headers': {'User-Agent': user_agent.random},
        'proxies': {'http': random.choice(proxy_pool)},
    }
if __name__ == "__main__":
    # Demo: fetch page 3 of Douban Top 250 (items 51-75) through a
    # random proxy with a randomized User-Agent header.
    disguise = getParams()
    soup = getSoup("https://movie.douban.com/top250?start=50", **disguise)