A Simple Walkthrough: Scraping Data with a Plain (No-Framework) Crawler and Storing It in a Database

This article implements a simple workflow in Python 3: scrape some live-stream listings from Huya and store them in a MySQL database with the pymysql module.

Step 1: scrape the live-stream information from the page and clean it up

Step 2: save it to a local *.txt file

Step 3: insert it into the database
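
The script assumes a local MySQL database named imooc that already contains a rank__list table. A minimal one-time setup sketch, assuming the column types below (the script itself only dictates the column names):

import pymysql

# One-time setup: create the table the crawler inserts into.
# The VARCHAR sizes are assumptions; adjust as needed.
db = pymysql.connect(host="localhost", user="root",
                     password="123456", database="imooc")
cursor = db.cursor()
cursor.execute("""
    CREATE TABLE IF NOT EXISTS rank__list (
        id INT AUTO_INCREMENT PRIMARY KEY,
        name VARCHAR(100) NOT NULL,
        viewer__number VARCHAR(20) NOT NULL
    )
""")
cursor.close()
db.close()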

import re
from urllib import request

import pymysql

class Spider():
    url = 'https://www.huya.com/g/wzry'
    # Regexes for one anchor's block, the nickname, and the viewer count
    root_pattern = r'<span class="txt">([\s\S]*?)</li>'
    name_pattern = r'<i class="nick" title="([\s\S]*?)">'
    number_pattern = r'<i class="js-num">([\s\S]*?)</i>'
    def _fetch_content(self):
        # Download the page and decode it; Huya serves UTF-8
        r = request.urlopen(Spider.url)
        htmls = r.read().decode('utf-8')
        return htmls
    def refine(self, anchors):
        # Keep the first regex match for each field and trim whitespace
        l = lambda anchor: {'name': anchor['name'][0].strip(),
                            'number': anchor['number'][0]}
        return map(l, anchors)
    def sort__seed(self, anchor):
        # Turn a display count such as '12.3萬' into a comparable number;
        # the pattern also captures the decimal part
        r = re.findall(r'\d+\.?\d*', anchor['number'])
        number = float(r[0])
        if '萬' in anchor['number']:
            number *= 10000
        return number

    def sort__rank(self, anchors):
        # Highest viewer count first
        return sorted(anchors, key=self.sort__seed, reverse=True)
    def __analysis(self, htmls):
        # Split the page into per-anchor blocks, then extract both fields
        root_html = re.findall(Spider.root_pattern, htmls)
        anchors = []
        for html in root_html:
            number = re.findall(Spider.number_pattern, html)
            name = re.findall(Spider.name_pattern, html)
            anchors.append({'name': name, 'number': number})
        return anchors
    def __show(self, anchors):
        for anchor in anchors:
            print('name : ' + anchor['name'] + ' number : ' + anchor['number'])
    def __write(self, anchors):
        # Dump the sorted results to a local text file, '^'-separated
        fr = open(r"d:\spider__write.txt", "w", encoding="utf-8")
        fr.write('name^viewer__number\n')  # header row, skipped on import
        for anchor in anchors:
            fr.write(anchor['name'] + '^' + anchor['number'] + '\n')
        fr.close()
    def read__db(self, anchors):
        # Read the text file back and insert every record into MySQL
        db = pymysql.connect(host="localhost", user="root",
                             password="123456", database="imooc")
        cursor = db.cursor()
        fr = open(r"d:\spider__write.txt", "r", encoding="utf-8")
        try:
            next(fr)  # skip the header row
            for line in fr:
                name, number = line.strip().split('^')
                # Parameterised query: the driver escapes the values,
                # which avoids SQL injection via anchor names
                cursor.execute(
                    "insert into rank__list(name, viewer__number) values(%s, %s)",
                    [name, number])
            # Commit once at the end so rollback can undo the whole batch
            db.commit()
        except Exception as e:
            print(e)
            db.rollback()
        cursor.close()
        db.close()
        fr.close()
    def go(self):
        htmls = self._fetch_content()
        anchors = self.__analysis(htmls)
        anchors = list(self.refine(anchors))
        anchors = self.sort__rank(anchors)
        # self.__show(anchors)
        self.__write(anchors)
        self.read__db(anchors)


spider = Spider()
spider.go()
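
If urlopen comes back empty or blocked, the cause is often that the site filters urllib's default User-Agent. A minimal sketch of a header-carrying fetch, assuming a generic browser UA string (not part of the original post):

from urllib import request

def fetch_with_header(url):
    # Hypothetical variant of _fetch_content: send a browser-like
    # User-Agent, since some sites reject urllib's default one
    req = request.Request(url, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'
    })
    with request.urlopen(req) as r:
        return r.read().decode('utf-8')

To use it, swap the request.urlopen(Spider.url) call in _fetch_content for fetch_with_header(Spider.url).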

 
