Scraping Sina News with Python 3 and Writing It to MySQL

import pymysql
import requests
from bs4 import BeautifulSoup

# Fetch the Sina world-news page and parse it with BeautifulSoup
url = 'https://news.sina.com.cn/world/'
response = requests.get(url)
response.encoding = 'utf-8'
html = response.text
soup = BeautifulSoup(html, 'html.parser')
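
Before touching the database, it can help to confirm that the request succeeded and that the page actually contains the elements we are about to select. A minimal check, reusing the response and soup objects created above:

# Optional sanity check (reuses the response/soup objects from above)
response.raise_for_status()           # raises requests.HTTPError on 4xx/5xx responses
items = soup.select('.news-item')     # the same selector used in the loop below
print('found', len(items), 'news items')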

# Connect to the local MySQL server (adjust credentials to your environment)
db = pymysql.connect(
    host='localhost',
    user='root',
    password='123456',
    database='allproject',
    port=3306
)
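
Because the scraped titles are Chinese, it is worth making the connection encoding explicit; pymysql.connect accepts a charset argument. A sketch of the same call with that one addition, assuming the database itself also uses utf8mb4:

# Same connection as above, but with an explicit character set so that
# Chinese titles survive the round trip (assumes the DB itself is utf8mb4)
db = pymysql.connect(
    host='localhost', user='root', password='123456',
    database='allproject', port=3306,
    charset='utf8mb4'
)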

# Create a cursor object with the cursor() method

cursor = db.cursor()

# Drop the SINA_NEWS table if it already exists
cursor.execute("DROP TABLE IF EXISTS SINA_NEWS")

# Create the table
sql_CT = """CREATE TABLE SINA_NEWS(
    NUM CHAR(4) NOT NULL PRIMARY KEY,
    TITLE CHAR(100),
    TIMES CHAR(20),
    HREF CHAR(100))"""
cursor.execute(sql_CT)
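
CHAR(100) can be tight for some headlines and URLs. If inserts fail with "Data too long" errors, one option (not part of the original, just a sketch) is the same table with variable-length columns:

# Alternative schema with roomier variable-length columns
# (an optional tweak; the original CHAR columns also work for short rows)
sql_CT = """CREATE TABLE SINA_NEWS(
    NUM CHAR(4) NOT NULL PRIMARY KEY,
    TITLE VARCHAR(255),
    TIMES VARCHAR(20),
    HREF VARCHAR(255))"""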

n = 0
for newsItem in soup.select(".news-item"):
    # Skip items that have no headline
    if len(newsItem.select("h2")) > 0:
        n += 1  # count only the items we actually insert, so NUM stays contiguous
        title = newsItem.select("h2")[0].text
        href = newsItem.select("a")[0]["href"]
        pub_time = newsItem.select(".time")[0].text
        num = str(n).zfill(3)
        print(num, title, pub_time, href)
        into = "INSERT INTO SINA_NEWS(NUM, TITLE, TIMES, HREF) VALUES(%s, %s, %s, %s)"
        values = (num, title, pub_time, href)
        try:
            cursor.execute(into, values)
            # Commit the insert to the database
            db.commit()
            print("Row inserted successfully")
        except Exception as e:
            # Roll back on error
            db.rollback()
            print("Insert failed:", str(e))
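
Once the loop finishes, release the database resources; the original snippet leaves the connection open:

# Close the cursor and the connection after all rows are written
cursor.close()
db.close()

Committing after every row keeps the example simple; for larger scrapes, collecting the value tuples in a list and passing them to cursor.executemany() followed by a single db.commit() is the usual faster alternative.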
