1.參考
https://doc.scrapy.org/en/latest/topics/spiders.html#scrapy.spiders.Spider.start_requests
參照該範例,自動提交 login.php 返回的登錄表單
2.模擬登錄雪球
# -*- coding: utf-8 -*-
"""Simulate a login to xueqiu.com with Scrapy.

Modeled on the start_requests() section of the Scrapy spider docs:
https://doc.scrapy.org/en/latest/topics/spiders.html
"""
import os

import scrapy
from scrapy.shell import inspect_response  # kept for interactive debugging


class LoginSpider(scrapy.Spider):
    """Log in to Xueqiu, then fetch a user page to confirm the session works."""

    name = 'login'
    allowed_domains = ['xueqiu.com']
    # start_urls = ['http://xueqiu.com/']
    # The default start_requests() implementation generates
    # Request(url, dont_filter=True) for each url in start_urls.

    # BUG FIX: the original line ended with a trailing comma, which made
    # url_login a 1-tuple instead of a string and broke FormRequest(url=...).
    url_login = 'https://xueqiu.com/snowman/login'
    url_somebody = 'https://xueqiu.com/u/6146070786'

    # Login form payload; credentials come from environment variables.
    data_dict = {
        'remember_me': 'true',
        # 'username': 'fake',  # server answers 200 with {"error_description":"用戶名或密碼錯誤","error_uri":"/provider/oauth/token","error_code":"20082"}
        'username': os.getenv('xueqiu_username'),
        'password': os.getenv('xueqiu_password'),
    }

    def start_requests(self):
        """Submit the login form instead of crawling start_urls."""
        return [scrapy.FormRequest(
            url=self.url_login,
            # Without this header the server responds 404 and the crawl
            # exits, even though a packet capture shows the login succeeded.
            headers={'X-Requested-With': 'XMLHttpRequest'},
            # Route through the local proxy; per the original author's note,
            # omitting this makes responses come back slowly under Fiddler.
            meta={'proxy': 'http://127.0.0.1:8888'},
            formdata=self.data_dict,
            callback=self.logged_in,
        )]

    def logged_in(self, response):
        """Verify the login succeeded, then request a user profile page."""
        # inspect_response(response, self)
        # An AssertionError here deliberately aborts the crawl.
        assert os.getenv('xueqiu_nickname') in response.text
        return scrapy.Request(self.url_somebody, dont_filter=True,
                              meta={'proxy': 'http://127.0.0.1:8888'})

    def parse(self, response):
        """Default callback: log whether the page still shows us as logged in."""
        # inspect_response(response, self)
        self.log(os.getenv('xueqiu_nickname') in response.text)