Tmall Supermarket (天貓超市) requests crawler

import pymongo
import requests
from requests.exceptions import ConnectionError
from pyquery import PyQuery as pq
import time

# MongoDB connection; scraped items are stored in the 'tmmc' database.
client = pymongo.MongoClient('localhost')
db = client['tmmc']

#PROXY_POOL_URL = 'http://localhost:5555/random'
#
#proxy = None
#max_count = 5
#
#def get_proxy():
#    try:
#        response = requests.get(PROXY_POOL_URL)
#        if response.status_code == 200:
#            return response.text
#    except ConnectionError:
#        return None

headers = {
    'Cookie': '',  # left empty here; fill in your own cookie string if the site requires a logged-in session
    'Host': 'list.tmall.com',
    'upgrade-insecure-requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36'
}

#def get_html(url):
#    global proxy
#    try:
#        if proxy:
#            proxies = {
#                    'http': 'http://' + proxy
#                    }
#            response = requests.get(url, allow_redirects=False, headers=headers,
#                                    proxies=proxies)
#        else:
#            response = requests.get(url, allow_redirects=False, headers=headers)
#
#        if response.status_code == 200:
#            print('200')
#            return response.text
#        if response.status_code == 302:
#            print('302')
#            proxy = get_proxy()
#            if proxy:
#                print('Using proxy', proxy)
#                return get_html(url)
#            else:
#                print('Get proxy failed')
#                return None
#        if response.status_code == 404:
#            print('404')
#
#    except ConnectionError as e:
#        print('Error Occurred:', e.args)
#        proxy = get_proxy()
#        return get_html(url)

def get_html(url):
    # Fetch a listing page without following redirects, so a 302 (usually an
    # anti-crawler redirect) can be detected and reported.
    try:
        response = requests.get(url, allow_redirects=False, headers=headers)
        if response.status_code == 200:
            print('200')
            return response.text
        if response.status_code == 302:
            print('302')
        if response.status_code == 404:
            print('404')

    except ConnectionError:
        # Retry the same URL if the connection drops.
        return get_html(url)

    
#base_url = 'https://list.tmall.com/search_product.htm?q=%E5%A4%A7%E7%B1%B3&sort=wd&user_id=725677994&s=80'

def save_mongo(data):
    # Upsert by title so re-running the crawl updates existing records instead
    # of inserting duplicates.
    result = db['wd_sum_191223'].update_one({'title': data['title']}, {'$set': data}, upsert=True)
    if result.acknowledged:
        print('save to mongo', data['title'])
    else:
        print('failed', data['title'])
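
# Optional sketch (not part of the original script): read a few stored
# documents back from the same collection to verify the upserts. The helper
# name preview_saved is hypothetical.
def preview_saved(limit=5):
    for item in db['wd_sum_191223'].find().limit(limit):
        print(item.get('title'), item.get('wd_sum'), item.get('price'))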
        
def get_index(base_url):
    html = get_html(base_url)
    if not html:
        return
    doc = pq(html)
    # Each selector yields one element per product on the listing page.
    title = doc('.product-title a').items()
    wd_sum = doc('.item-sum strong').items()
    price = doc('.item-price strong').items()
    href = doc('.product-title a').items()

    for o, i, e, j in zip(title, wd_sum, price, href):
        dic1 = {}
        dic1['title'] = o.text()       # product name
        dic1['wd_sum'] = i.text()      # sales count
        dic1['price'] = e.text()       # price
        dic1['href'] = j.attr('href')  # link to the product detail page
        save_mongo(dic1)
   
keyword = '大米'  # search keyword ("rice")
deep = 10         # number of listing pages to crawl
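
# The commented-out base_url above shows the keyword already percent-encoded
# (q=%E5%A4%A7%E7%B1%B3). requests quotes the Chinese keyword on its own, but
# the same encoding can be produced explicitly if needed (a sketch;
# urllib.parse is not used elsewhere in this script):
from urllib.parse import quote
encoded_keyword = quote(keyword)  # '大米' -> '%E5%A4%A7%E7%B1%B3'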
   
def main():
    url = 'https://list.tmall.com/search_product.htm?q=' + keyword + '&sort=wd&user_id=725677994' + '&s='

    for i in range(deep):
        print('Crawling page {}, please wait...'.format(i + 1))
        _url = url + str(i * 40)  # the 's' parameter pages through results 40 items at a time
        get_index(_url)
        time.sleep(60)            # wait 60 seconds between pages

        
if __name__ == '__main__':
    main()