使用python3爬取網頁,解析網頁,然後將結果寫入文件和數據庫
解析網頁用到了BeautifulSoup,入庫用到了pymysql
當然了這兩個都是第三方的庫,需要安裝
具體代碼如下:
#!/usr/bin/python
import urllib.request
import pymysql
from bs4 import BeautifulSoup
URL = "http://proxy.com.ru"
# The proxy list is laid out in the 8th <table> on the page
# (the original skipped tables with a manual counter until i == 7).
PROXY_TABLE_INDEX = 7


def _extract_proxies(table):
    """Return 'ip:port' strings from the rows of the proxy *table*.

    The first <tr> is a header row and is skipped (the original's j > 0
    check); rows with fewer than three cells are ignored defensively.
    """
    proxies = []
    for row in table.find_all('tr')[1:]:
        cells = row.find_all('td')
        if len(cells) >= 3:
            proxies.append(cells[1].text + ":" + cells[2].text)
    return proxies


def _write_proxy_file(proxies, path="ip.txt"):
    """Write one 'ip:port' entry per line to *path*."""
    # Context manager guarantees the file is closed even if a write
    # fails (the original leaked the handle on any mid-loop exception).
    with open(path, "w") as f:
        for proxy in proxies:
            f.write(proxy + "\n")


def _store_proxies(proxies):
    """Replace the contents of test.proxy_ip with *proxies*.

    Fixes two defects in the original:
    * the DELETE and the INSERT are executed as separate statements —
      pymysql's execute() rejects multi-statement SQL by default, so the
      original 'delete …;insert …' string raised a ProgrammingError;
    * values are passed as query parameters via executemany() instead of
      being concatenated into the SQL text (SQL-injection-safe).
    Errors are reported, not raised, matching the original's handling.
    """
    try:
        conn = pymysql.connect(host='localhost', user='root', passwd='1234',
                               db='test', charset='utf8')
        try:
            with conn.cursor() as cur:
                cur.execute('delete from proxy_ip')
                if proxies:
                    cur.executemany(
                        'insert into proxy_ip (ip) values (%s)', proxies)
            conn.commit()
        finally:
            conn.close()
    except pymysql.Error as e:
        print("pyMysql Error {0}".format(e))


def main():
    """Fetch the proxy page, extract ip:port pairs, save to file and DB."""
    # Explicit 'html.parser' avoids bs4's "no parser specified" warning
    # and makes the parse independent of which parsers are installed.
    soup = BeautifulSoup(urllib.request.urlopen(URL),
                         'html.parser', from_encoding='utf-8')
    tables = soup.find_all('table')
    if len(tables) > PROXY_TABLE_INDEX:
        print('開始抓取解析ip')
        proxies = _extract_proxies(tables[PROXY_TABLE_INDEX])
        _write_proxy_file(proxies)
        _store_proxies(proxies)
    print("完成")


if __name__ == "__main__":
    main()