1.python爬蟲瀏覽器偽裝
# 1. Browser disguise for a Python crawler
# Import the urllib.request module
import urllib.request

# Request header as a (name, value) tuple — a real-browser User-Agent string
headers = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
# Create an opener
opener = urllib.request.build_opener()
# Attach the header to the opener; every request made through it carries the UA
opener.addheaders = [headers]
# Install the opener globally so plain urlopen() uses it too
urllib.request.install_opener(opener)
# Open the page (NOTE: `url` must be defined by the caller)
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')

# 2. Setting a proxy
# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Set up the proxy handler
# BUG FIX: the class is urllib.request.ProxyHandler — "ProxyHandle" does not exist
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
# Create an opener routing through the proxy
# BUG FIX: urllib.request.HTTPHandler — "HTTPHandle" does not exist
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener globally
urllib.request.install_opener(opener)
# Open the page (NOTE: `url` must be defined by the caller)
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')

# 3. Setting a proxy and disguising as a browser at the same time
# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Create a Request object so per-request headers can be attached
# (NOTE: `url` must be defined by the caller)
req = urllib.request.Request(url)
# Add the User-Agent header
# BUG FIX: the original was missing the closing quote and parenthesis
req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko)")
# Set up the proxy
# BUG FIX: ProxyHandler (not "ProxyHandle") takes a dict, not a bare "http":addr pair
proxy = urllib.request.ProxyHandler({"http": proxy_addr})
# Create an opener (BUG FIX: HTTPHandler, not "HTTPHandle")
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener globally
urllib.request.install_opener(opener)
# Open the page through the prepared Request
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')

# 4. Adding multiple fields to the request headers
import urllib.request

# Several header fields at once, passed as a dict to Request(headers=...)
page_headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0",
    "Host": "www.baidu.com",
    "Cookie": "xxxxxxxx",  # placeholder — copy a real cookie from the browser
}
# (NOTE: `url` must be defined by the caller)
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')

# 5. Adding POST request parameters
import urllib.request
import urllib.parse

# Encode the POST parameters as application/x-www-form-urlencoded
# (NOTE: `page_num` and `keywords` must be defined by the caller)
page_data = urllib.parse.urlencode([
    ('pn', page_num),
    ('kd', keywords),
])
# Headers copied from a real browser session against lagou.com
page_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0',
    'Connection': 'keep-alive',
    'Host': 'www.lagou.com',
    'Origin': 'https://www.lagou.com',
    'Cookie': 'JSESSIONID=ABAAABAABEEAAJA8F28C00A88DC4D771796BB5C6FFA2DDA; user_trace_token=20170715131136-d58c1f22f6434e9992fc0b35819a572b',
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Referer': 'https://www.lagou.com/jobs/list_%E6%95%B0%E6%8D%AE%E6%8C%96%E6%8E%98?labelWords=&fromSearch=true&suginput=',
    'X-Anit-Forge-Token': 'None',
    'X-Requested-With': 'XMLHttpRequest',
}
# Passing data= to urlopen makes this a POST request
# (NOTE: `url` must be defined by the caller)
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req, data=page_data.encode('utf-8')).read().decode('utf-8')

# 6. Using PhantomJS to simulate a browser request
# 1. Download PhantomJS, install it locally, and add it to the PATH
# NOTE(review): PhantomJS support was removed from modern Selenium (4.x);
# use headless Chrome/Firefox instead — kept here as the article describes it.
from selenium import webdriver

bs = webdriver.PhantomJS()
# Open the url (NOTE: `url` must be defined by the caller)
bs.get(url)
# Get the rendered page source
url_data = bs.page_source
# Save a screenshot of the rendered page
# (NOTE: `filename` must be defined by the caller)
bs.get_screenshot_as_file(filename)
7.phantomjs設置user-agent和cookie
# 7. Setting user-agent and cookies in PhantomJS
# NOTE(review): PhantomJS support was removed from modern Selenium (4.x).
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

# Override the browser's User-Agent through desired capabilities
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
bs = webdriver.PhantomJS(desired_capabilities=dcap)
# (NOTE: `url` must be defined by the caller)
bs.get(url)
# Drop any cookies the driver already holds
bs.delete_all_cookies()
# Cookie format: inspect in the browser — a cookie needs domain, name, value, path
cookie = {
    'domain': '.www.baidu.com',  # note the leading dot
    'name': 'xxxx',
    'value': 'xxxx',
    'path': 'xxxx',
}
# Add the cookie to the PhantomJS session
bs.add_cookie(cookie)

# 8. Using a WebDriver tool
# 1. Download the WebDriver binary (e.g. chromedriver.exe) matching your browser
# 2. Place it in a known location, e.g. c:/chromdriver.exe
# NOTE(review): in Selenium 4.x, executable_path is replaced by the Service API;
# kept as the article describes it.
from selenium import webdriver

driver = webdriver.Chrome(executable_path="C:/chromdriver.exe")
# Open the url (NOTE: `url` must be defined by the caller)
driver.get(url)
以上這篇Python實現爬蟲設置代理IP和偽裝成瀏覽器的方法分享就是小編分享給大家的全部內容了,希望能給大家一個參考,也希望大家多多支持VEVB武林網。
新聞熱點
疑難解答