A few words up front: I'm still in the early days of learning crawlers, half-understanding a lot of things, learning, recording, and building as I go. The code is bad enough that even I can't stand to look at it...
The focus this time is Meituan shop data; for now I've only scraped the food (美食) merchants. Results first: about 100k records in roughly an hour, without deduplicating the data. The rough idea: first scrape the cities of each province, then fetch each city's latitude and longitude, and finally build the request parameters and page through the data. The scraped results are shown in the screenshot below.
- Get each city's name and id. First endpoint: (https://www.meituan.com/ptapi/getprovincecityinfo/)
- Get each city's latitude and longitude to build the request parameters. Second endpoint: (https://apis.map.qq.com/jsapi?qt=poi&wd=西安&pn=0&rn=10&rich_source=qipao&rich=web&nj=0&c=1&key=FBOBZ-VODWU-C7SVF-B2BDI-UK3JE-YBFUS&output=jsonp&pf=jsapi&ref=jsapi&cb=qq.maps._svcb3.search_service_0&)
- Build the request parameters and page through the data. Third endpoint: (https://www.meituan.com/meishi/api/poi/getNearPoiList?offset=0&limit=10&cityId=1&lat=39.950256&lng=116.34784). This URL returns nothing when opened directly in a browser; you have to test it with Postman, which is a gotcha. See the sanity-check sketch right after this list.
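If you'd rather not reach for Postman, the same check can be done from Python by sending the headers the endpoint seems to expect (the Host/Referer/User-Agent trio used throughout this post). This is a minimal sketch; the cityId/lat/lng values are just the sample ones from the URL above.

```python
import requests

# Sanity check for the getNearPoiList endpoint: a bare browser request fails,
# but it answers once these headers are attached.
headers = {
    "Content-Type": "application/json;charset=utf-8",
    "Host": "www.meituan.com",
    "Referer": "https://www.meituan.com/meishi/4813791/",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
                  "(KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
}
params = {"offset": 0, "limit": 10, "cityId": 1, "lat": 39.950256, "lng": 116.34784}
resp = requests.get("https://www.meituan.com/meishi/api/poi/getNearPoiList",
                    params=params, headers=headers)
print(resp.status_code)
print(resp.json())  # expect a JSON body whose 'data' field lists nearby shops
```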
The process of finding these endpoints through packet capture went as follows.
On the city-switch page, Chrome's network panel captures a request returning the id and name of every city, which makes the next step of fetching coordinates easy; this is the first endpoint above.
Then go back to the homepage, click 美食 (Food), open any merchant at random, and capture the request shown below.
The Postman test parameters are shown in the screenshot below.
One small gotcha to watch for: the request headers, shown in the screenshot below.
The returned data is shown in the screenshot below.
At this point we know the rough flow and the key parameters, so what follows is the implementation. The database connections and the query/insert operations in the code all need to be rewritten for your setup; what appears below are some default connections I wrapped for myself.
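The wrappers themselves aren't included in the post, so here is a minimal sketch of what a compatible `Mongo` class and `redis_r()` helper might look like, built on pymongo and redis-py. Everything here is an assumption made so the code below can run: the connection URLs, the `.collect` attribute (the code calls `self.collect.collect`), and the misspelled `collecttion_name` keyword, which is kept only so the existing calls match.

```python
import pymongo
import redis


class Mongo:
    """Hypothetical stand-in for the author's Mongo wrapper: it exposes the
    pymongo collection as a .collect attribute, and keeps the original
    (misspelled) collecttion_name keyword so existing calls work unchanged."""

    def __init__(self, db_name, collecttion_name, uri="mongodb://localhost:27017"):
        client = pymongo.MongoClient(uri)
        self.collect = client[db_name][collecttion_name]


def redis_r(host="localhost", port=6379, db=0):
    """Hypothetical stand-in for the author's redis helper: a plain client
    whose sadd() result doubles as a 'have I seen this page yet?' flag."""
    return redis.Redis(host=host, port=port, db=db)
```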
```python
import re
import json
import time
import logging

import requests
from multiprocessing import Pool

logging.captureWarnings(True)  # silence the InsecureRequestWarning from verify=False


class MeiTuan:
    def __init__(self):
        self.headers = {
            "Content-Type": "application/json;charset=utf-8",
            "Host": "www.meituan.com",
            "Referer": "https://www.meituan.com/meishi/4813791/",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
        }
        self.re_w_c = re.compile(r'"city":(.*?)"area')
        # My own mongo wrapper -- swap in your own connection here.
        self.collect = Mongo(db_name='meituan', collecttion_name='shop')

    def all_response(self, url):
        """A request helper used throughout."""
        response = requests.get(url=url, headers=self.headers, verify=False)
        return response

    def get_city_id(self):
        """
        Fetch the city ids and store them in mongo (meituan/shop).
        """
        url = "https://www.meituan.com/ptapi/getprovincecityinfo/"
        response = self.all_response(url=url)
        response = json.loads(response.text)
        for provinces in response:
            city_info = {}
            city_info['provinceName'] = provinces['provinceName']
            city_info['city'] = []
            for city in provinces['cityInfoList']:
                city_info['city'].append({"{}".format(city['name']): "{}".format(city['id'])})
            self.collect.collect.insert_one(city_info)  # rewrite for your own storage

    def get_city_weidu(self):
        """Fetch each city's latitude and longitude from the QQ Maps endpoint."""
        url = 'https://apis.map.qq.com/jsapi'
        collect = Mongo(db_name='meituan', collecttion_name='new_shop')
        citynames = self.collect.collect.find()  # rewrite for your own storage
        for cityname in citynames:
            print(cityname)
            city_info = {}
            city_info['provinceName'] = cityname['provinceName']
            city_info['city'] = []
            for name in cityname['city']:
                w_c = {}
                new_name = list(name.keys())[0]
                params = {
                    "qt": "poi",
                    "wd": "%s" % new_name,
                    "pn": "0",
                    "rn": "10",
                    "rich_source": "qipao",
                    "rich": "web",
                    "nj": "0",
                    "c": "1",
                    "key": "FBOBZ-VODWU-C7SVF-B2BDI-UK3JE-YBFUS",
                    "output": "jsonp",
                    "pf": "jsapi",
                    "ref": "jsapi",
                    "cb": "qq.maps._svcb3.search_service_0",
                }
                response = requests.get(url=url, params=params)
                # The endpoint returns JSONP: strip the callback wrapper, then
                # drop the trailing ')' before parsing it as JSON.
                result_1 = response.text.replace('qq.maps._svcb3.search_service_0 && qq.maps._svcb3.search_service_0(', "")
                try:
                    result = json.loads(result_1[:-1])
                    w_c['lng'] = result['detail']['city']['pointx']
                    w_c['lat'] = result['detail']['city']['pointy']
                    name.update(w_c)
                    city_info['city'].append(name)
                except Exception:
                    pass
            print(city_info)
            collect.collect.insert_one(city_info)
```
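With the wrapper sketch from earlier in place, populating the two collections is just two calls; a usage sketch:

```python
mt = MeiTuan()
mt.get_city_id()     # fills meituan/shop: one document per province with its city ids
mt.get_city_weidu()  # fills meituan/new_shop: the same documents with lat/lng added
```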
Then we parallelize it. A test with a pool of 10 workers pulled 100k records in about an hour, which is still only a small fraction of the total, and the IP never got banned.
```python
def ceshi(data):
    """Worker: crawl every page of every city in one province document."""
    con = redis_r()  # my own redis connection helper -- replace with your own
    client = Mongo(db_name='meituan', collecttion_name='shop_info')
    headers = {
        "Content-Type": "application/json;charset=utf-8",
        "Host": "www.meituan.com",
        "Referer": "https://www.meituan.com/meishi/4813791/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
    }
    start_url = 'https://www.meituan.com/meishi/api/poi/getNearPoiList'
    for city in data['city']:
        city_name = list(city.keys())[0]
        city_id = city[city_name]
        lat = city['lat']
        lng = city['lng']
        num = 0
        while True:
            params = {
                "offset": str(num * 10),  # advance by the page size; stepping by 1 refetches overlapping rows
                "limit": "10",
                "cityId": str(city_id),
                "lat": str(lat),
                "lng": str(lng),
            }
            # A small resume-from-breakpoint trick: province+city+page number is
            # used as a flag in a redis set, so the crawler can be stopped and
            # restarted at will without refetching pages it has already done.
            res = con.sadd("old", "%s%s%s" % (data['provinceName'], city_name, num))
            if res == 1:
                response = requests.get(url=start_url, params=params, headers=headers)
                print(response.url, "---------")
                time.sleep(2)
                try:
                    print(response.text)
                    datas = json.loads(response.text)
                    if datas['data'] == []:
                        break  # past the last page for this city
                    for i in datas['data']:
                        k = {'provinceName': data['provinceName'], "city_name": city_name}
                        k.update(i)
                        print(k)
                        print("province %s, city %s, page %s" % (data['provinceName'], city_name, num))
                        client.collect.insert_one(k)
                except Exception:
                    pass
            num += 1


if __name__ == '__main__':
    client = Mongo(db_name='meituan', collecttion_name='new_shop')
    city_info = client.collect.find()
    result = []
    for i in city_info:
        result.append(i)
    p = Pool(10)
    for provinces in result:
        p.apply_async(ceshi, args=(provinces,))
    p.close()
    p.join()
```
And with that, the Meituan crawler is done.
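One loose end: the scraped data was never deduplicated, and the paging makes duplicates likely. A minimal cleanup sketch for the mongo collection, assuming each shop record carries a unique id field named `id` (check the actual field name in the API response before running this):

```python
import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")
coll = client["meituan"]["shop_info"]

# Group duplicates by the (assumed) shop id, then delete all but one copy.
pipeline = [
    {"$group": {"_id": "$id", "dups": {"$push": "$_id"}, "count": {"$sum": 1}}},
    {"$match": {"count": {"$gt": 1}}},
]
for doc in coll.aggregate(pipeline, allowDiskUse=True):
    coll.delete_many({"_id": {"$in": doc["dups"][1:]}})  # keep the first copy
```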