Crawler 07: Regular Expressions and the Requests Library, a Combined Exercise

"""
@author: Bre Athy
@contact: https://www.zhihu.com/people/you-yi-shi-de-hu-xi
@productware: PyCharm
@file: Requests&正則表達式爬取貓眼電影Top100.py
@time: 2020/6/4 11:29
"""
import json
import re

import requests
from multiprocessing import Pool
from requests.exceptions import RequestException


def get_one_page(url):
    """Fetch one page of the Maoyan Top 100 board and return its HTML, or None on failure."""
    HEADERS = {
        'User-Agent': "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
    }
    try:
        response = requests.get(url, headers=HEADERS)
        if response.status_code == 200:
            # Decode explicitly as UTF-8 instead of relying on requests' encoding guess.
            return response.content.decode('utf-8')
        return None
    except RequestException:
        return None

def parse_one_page(html):
    """Yield one dict per movie entry parsed out of the board page HTML."""
    # Capture groups: ranking index, poster image URL, title, cast line,
    # release-time line, integer part of the score, fractional part of the score.
    pattern = re.compile(
        r'<dd>.*?board-index.*?>(\d+)</i>.*?data-src="(.*?)".*?name"><a.*?>(.*?)</a>'
        r'.*?star">(.*?)</p>.*?releasetime">(.*?)</p>'
        r'.*?integer">(.*?)</i>.*?fraction">(.*?)</i>.*?</dd>', re.S)
    items = re.findall(pattern, html)
    for item in items:
        yield {
            'index': item[0],
            'image': item[1],
            'title': item[2],
            'actor': item[3].strip()[3:],   # drop the leading 3-character "主演:" label
            'time': item[4].strip()[5:],    # drop the leading 5-character release-time label
            'score': item[5] + item[6]      # join integer and fraction parts of the score
        }

def write_to_file(content):
    """Append one record to result.txt as a single JSON line."""
    with open('result.txt', 'a', encoding='utf-8') as f:
        f.write(json.dumps(content, ensure_ascii=False) + '\n')


def main(offset):
    url = "https://maoyan.com/board/4?offset=" + str(offset)
    html = get_one_page(url)
    if html is None:  # request failed or the response was not 200
        return
    for item in parse_one_page(html):
        print(item)
        write_to_file(item)


if __name__ == "__main__":
    # Each board page holds 10 movies, so offsets 0, 10, ..., 90 cover the Top 100.
    with Pool() as pool:
        pool.map(main, [i * 10 for i in range(10)])
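
Since write_to_file appends each record as one JSON object per line, result.txt is effectively in JSON Lines format and can be loaded back with json.loads applied line by line. Below is a minimal reader sketch, assuming the script above has already produced result.txt in the working directory; load_results is just an illustrative helper name, not part of the original script:

import json

def load_results(path='result.txt'):
    """Read the scraped records back as a list of dicts, one JSON object per line."""
    movies = []
    with open(path, encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            if line:                      # skip any blank lines
                movies.append(json.loads(line))
    return movies

if __name__ == "__main__":
    movies = load_results()
    # The worker processes append to the file independently, so line order is not
    # guaranteed to follow the ranking; sort by the 'index' field before use.
    movies.sort(key=lambda m: int(m['index']))
    print(len(movies), 'movies loaded')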