How does a Python crawler set a proxy IP and disguise itself as a browser?

1. Disguising a Python crawler as a browser

# Import the urllib.request module
import urllib.request

# Request header: the User-Agent string of a real browser
headers = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
# Create an opener
opener = urllib.request.build_opener()
# Attach the header to the opener
opener.addheaders = [headers]
# Install the opener as the global default
urllib.request.install_opener(opener)
# Open the page with urlopen (url is assumed to be defined)
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
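The snippet above assumes url is already defined. Here is a minimal, self-contained sketch for trying it end to end; the target URL, the timeout, and the error handling are illustrative assumptions, not part of the original:

import urllib.request
from urllib.error import URLError

url = "http://www.baidu.com"  # hypothetical target URL, for illustration only
headers = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
opener = urllib.request.build_opener()
opener.addheaders = [headers]
urllib.request.install_opener(opener)
try:
    data = urllib.request.urlopen(url, timeout=10).read().decode('utf-8', 'ignore')
    print(data[:200])  # print the first 200 characters as a sanity check
except URLError as e:
    print("request failed:", e)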

2. Setting a proxy

# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Build the proxy handler
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
# Create an opener that routes requests through the proxy
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener as the global default
urllib.request.install_opener(opener)
# Open the page with urlopen
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
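To check that traffic really goes through the proxy, one common trick is to fetch a service that echoes the caller's IP. The sketch below uses httpbin.org/ip for that purpose; both the echo service and the proxy address (the sample from this article, which has likely long expired) are assumptions to swap for your own:

import urllib.request

# Sample proxy from this article; public proxies expire quickly,
# so substitute a live one when testing.
proxy = urllib.request.ProxyHandler({'http': "122.241.72.191:808"})
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
urllib.request.install_opener(opener)
# httpbin.org/ip echoes the requesting IP; if the proxy is in effect,
# the printed address should be the proxy's, not your own.
print(urllib.request.urlopen("http://httpbin.org/ip").read().decode('utf-8'))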

3. Setting a proxy and simulating a browser at the same time

# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Create a request object
req = urllib.request.Request(url)
# Add the browser User-Agent header
req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
# Build the proxy handler
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
# Create an opener
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener as the global default
urllib.request.install_opener(opener)
# Open the page with urlopen, passing the request object
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
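Note that install_opener() changes the default opener for the whole process. When different requests need different proxies, the opener can be called directly instead of being installed globally; a sketch under that assumption (both proxy addresses and the URL are placeholders):

import urllib.request

# Two hypothetical proxies for two separate requests
opener_a = urllib.request.build_opener(
    urllib.request.ProxyHandler({'http': "122.241.72.191:808"}))
opener_b = urllib.request.build_opener(
    urllib.request.ProxyHandler({'http': "61.135.217.7:80"}))

req = urllib.request.Request("http://www.example.com")  # placeholder URL
req.add_header("User-Agent", "Mozilla/5.0 ...")  # abbreviated UA string
# opener.open() applies only to this call; the global default is untouched
data_a = opener_a.open(req).read()
data_b = opener_b.open(req).read()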

4. Adding multiple fields to the request headers

import urllib.request

page_headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0",
    "Host": "www.baidu.com",
    "Cookie": "xxxxxxxx"
}
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
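To double-check what will actually be sent, the Request object exposes the headers attached to it. A small sketch using the standard header_items() method (the URL and header values are the placeholders from above):

import urllib.request

page_headers = {"User-Agent": "Mozilla/5.0 ...",  # abbreviated
                "Host": "www.baidu.com",
                "Cookie": "xxxxxxxx"}
req = urllib.request.Request("http://www.baidu.com", headers=page_headers)
# header_items() lists the headers currently attached to the request
for name, value in req.header_items():
    print(name, ":", value)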

5. Adding POST request parameters

import urllib.request
import urllib.parse

# Set the POST parameters (page_num and keywords are assumed to be defined)
page_data = urllib.parse.urlencode([
    ('pn', page_num),
    ('kd', keywords)
])
# Set the headers
page_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0',
    'Connection': 'keep-alive',
    'Host': 'www.lagou.com',
    'Origin': 'https://www.lagou.com',
    'Cookie': 'JSESSIONID=ABAAABAABEEAAJA8F28C00A88DC4D771796BB5C6FFA2DDA; user_tracetoken=20170715131136-d58c1f22f6434e9992fc0b35819a572b',
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Referer': 'https://www.lagou.com/jobs/list_%E6%95%B0%E6%8D%AE%E6%8C%96%E6%8E%98?labelWords=&fromSearch=true&suginput=',
    'X-Anit-Forge-Token': 'None',
    'X-Requested-With': 'XMLHttpRequest'
}
# Open the page; passing data makes this a POST request
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req, data=page_data.encode('utf-8')).read().decode('utf-8')
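Since this Lagou endpoint returns JSON, the decoded body is usually parsed rather than kept as raw text. A minimal continuation, assuming data from the snippet above holds a valid JSON string:

import json

# 'data' is the decoded response body from the snippet above;
# this assumes the endpoint returned valid JSON.
result = json.loads(data)
print(result.keys())  # inspect the top-level structure first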

6. Simulating browser requests with PhantomJS

# 1. Download PhantomJS, install it locally, and add it to the PATH environment variable
from selenium import webdriver

bs = webdriver.PhantomJS()
# Open the url
bs.get(url)
# Get the rendered page source
url_data = bs.page_source
# Save the rendered page as an image (filename is assumed to be defined)
bs.get_screenshot_as_file(filename)
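PhantomJS is no longer maintained, and recent Selenium releases have dropped support for it. A headless Chrome sketch covering the same use case (the URL and filename are placeholders; it requires chromedriver, as described in section 8):

from selenium import webdriver
from selenium.webdriver.chrome.options import Options

options = Options()
options.add_argument("--headless")  # run Chrome without a visible window
driver = webdriver.Chrome(options=options)
driver.get("http://www.baidu.com")         # placeholder URL
html = driver.page_source                  # rendered page source
driver.get_screenshot_as_file("page.png")  # placeholder filename
driver.quit()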

7. Setting the user-agent and cookies in PhantomJS

from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
bs = webdriver.PhantomJS(desired_capabilities=dcap)
bs.get(url)
# Delete all cookies
bs.delete_all_cookies()
# Set a cookie
# Cookie format: check your browser's cookies; a cookie needs the following fields: domain, name, value, path
cookie = {
    'domain': '.www.baidu.com',  # note the leading dot
    'name': 'xxxx',
    'value': 'xxxx',
    'path': 'xxxx'
}
# Add the cookie to PhantomJS
bs.add_cookie(cookie)
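One practical caveat with Selenium's cookie API: add_cookie() generally only succeeds after the browser has already loaded a page on the cookie's domain, so the usual pattern is get, then add, then reload. A short sketch (the URL is a placeholder on the cookie's domain):

# Visit the cookie's domain first so its 'domain' field is accepted
bs.get("http://www.baidu.com")  # placeholder URL on the cookie's domain
bs.add_cookie(cookie)           # 'cookie' is the dict defined above
bs.get("http://www.baidu.com")  # reload so the new cookie takes effect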

8. Using a WebDriver tool

# 1. Download a WebDriver binary (e.g. chromedriver.exe) and the matching browser version
# 2. Put chromedriver.exe in some directory, e.g. C:\chromedriver.exe
from selenium import webdriver

driver = webdriver.Chrome(executable_path=r"C:\chromedriver.exe")
# Open the url
driver.get(url)
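JavaScript-heavy pages often finish rendering after get() returns, so scraping right away can miss content. A sketch using Selenium's explicit waits, continuing from the driver above (the element id "content" is hypothetical):

from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# Wait up to 10 seconds for a hypothetical element with id="content"
element = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.ID, "content"))
)
print(element.text)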

Source: http://www.kxdaili.com/daili/ip/3237.html