# Extract poet information from "https://www.shicimingju.com/chaxun/zuozhe/1.html",
# save it to an Excel file, and work around the site's anti-scraping protection
# via rotating proxy IPs and request rate limiting.
import requests
import re
import time
import random
import openpyxl
from bs4 import BeautifulSoup
def gethtml(url):
    """Download *url* and return its HTML as a UTF-8 decoded string.

    A User-Agent header and an HTTP proxy are picked at random for every
    request to make blocking by the target site less likely.

    Args:
        url: Absolute URL of the page to fetch.

    Returns:
        The response body decoded as UTF-8 text.

    Raises:
        requests.RequestException: on connection/proxy/timeout failures.
    """
    user_agent_list = [
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
        # BUG FIX: the original list was missing a comma here, so the next two
        # entries were silently fused into one string by implicit concatenation.
        "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
        "Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
        "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
        "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
        "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
        "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
        "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
        "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
    ]
    proxy_list = [
        '116.196.88.86', '106.54.221.125', '111.229.12.108',
        '113.161.58.255', '223.82.106.253', '14.161.10.191',
        '202.46.38.11', '82.156.171.147', '8.208.91.118',
        '120.76.135.236', '101.36.160.87', '121.8.215.106',
        '43.243.166.221', '8.208.91.118', '112.74.101.239'
    ]
    headers = {'User-Agent': random.choice(user_agent_list)}
    # NOTE(review): requests expects proxy URLs like "http://host:port"; these
    # bare IPs have no scheme or port — confirm they actually route traffic,
    # otherwise requests may fall back to a direct connection.
    proxy = {"http": random.choice(proxy_list)}
    # The original code set requests.DEFAULT_RETRIES (not a functional requests
    # API) and created a Session it never used; both removed. A timeout is
    # added so a dead proxy cannot hang the crawler forever.
    r = requests.get(url, headers=headers, proxies=proxy, timeout=10)
    r.encoding = 'utf-8'
    return r.text
def html_cst(html, data):
    """Parse one poet page's HTML and append a 7-field record to *data*.

    Record layout: [name, poem count, dynasty, life span, courtesy name (字),
    art name (号), list of representative works]. Fields that cannot be
    extracted are left as '' so the record length is always 7.

    Args:
        html: Raw HTML text of a poet page.
        data: List mutated in place; one record is always appended.
    """
    # Define the record BEFORE any parsing: the original defined sd inside the
    # try body, so a failure in the soup.find calls made the outer except
    # reference an undefined name (NameError) instead of saving an empty row.
    sd = ['', '', '', '', '', '', '']
    try:
        soup = BeautifulSoup(html, "html.parser")
        s1 = soup.find('div', class_="des")
        s2 = soup.find_all('div', class_="shici_list_main")
        s3 = soup.find_all('div', class_="aside_val")
        try:
            sd[0] = s1.a.text                                    # name
            sd[1] = s3[1].text                                   # poem count
            sd[2] = s3[0].text                                   # dynasty
            sd[3] = re.split(r'[(|)]', s1.text)[1]               # life span
            sd[4] = ','.join(re.findall(r'字(.*?)[,|。]', s1.text))  # courtesy name
            sd[5] = ','.join(re.findall(r'号(.*?)[,|。]', s1.text))  # art name
            sd[6] = [item.a.text for item in s2]                 # representative works
        except (AttributeError, IndexError, TypeError):
            # Fallback layout: some pages expose only a plain <h4> heading
            # plus the aside statistics, with no "des" biography block.
            try:
                sd[0] = soup.find('h4').text
                sd[1] = s3[1].text
                sd[2] = s3[0].text
            except (AttributeError, IndexError, TypeError):
                pass  # deliberately best-effort: keep whatever was extracted
    except Exception:
        # Malformed/unexpected page: still record an (empty) row so the
        # caller's bookkeeping stays aligned with page numbers.
        pass
    data.append(sd)
def data_save(data, path="D:\\python data\\诗人.xlsx"):
    """Write the first record in *data* as a new row 2 of the workbook.

    Args:
        data: List of 7-field records as produced by html_cst; only data[0]
              is written.
        path: Workbook location. Parameterized (backward-compatibly) instead
              of the original hard-coded path repeated in load and save.
    """
    wb = openpyxl.load_workbook(path)
    sheet = wb['sheet']
    sheet.insert_rows(2)  # newest record goes on top, just under the header
    # Columns 1-6 are scalar fields; write them in a loop instead of six
    # copy-pasted cell assignments.
    for col, value in enumerate(data[0][:6], start=1):
        sheet.cell(2, col).value = value
    # Column 7: representative works, joined into one cell (works for both
    # the list from the main layout and the '' fallback).
    sheet.cell(2, 7).value = ''.join(data[0][6])
    wb.save(path)
def main():
    """Crawl poet pages 1..13703, saving each extracted record to Excel.

    Rate-limits by sleeping briefly every 10 pages; per-page failures are
    printed and skipped so one bad page cannot stop the crawl.
    """
    # BUG FIX: the original read a global `n` that was never initialized
    # anywhere, so the first iteration raised immediately. The counter is
    # now a plain local.
    n = 1
    total = 13703  # number of poet pages on the site per the original code
    # BUG FIX: the original `while(1)` only checked `n > 13703` inside the
    # exception handler, so the loop could not end via the success path.
    while n <= total:
        try:
            data = list()
            url = "https://www.shicimingju.com/chaxun/zuozhe/" + str(n) + ".html"
            n = n + 1
            html_cst(gethtml(url), data)
            data_save(data)
            print("{0:.2f}%".format((n / total) * 100))
            if n % 10 == 0:
                time.sleep(0.5)  # frequency limit: brief pause every 10 requests
        except Exception as e:
            # Best-effort crawl: report the error and the page it happened on,
            # then continue with the next page.
            print(str(e))
            print(n)
# Guard the entry point so importing this module does not start the crawl.
if __name__ == "__main__":
    main()