「Docker in Action」Python crawler techniques with Docker
Last time we worked out the app's actual request traffic. This time we focus on the Python side: scraping the information inside the app. Source code:
Analyzing the app's packets
Viewing the analysis
The parsed headers
Nox emulator configuration
Python code: crawling the categories
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import requests

# The header carries a lot of fields, because every vendor designs theirs differently.
# Fiddler captures more fields than are strictly required; some are optional, so in
# practice you comment a few out and check whether the request still succeeds.
def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    print(response.text)

handle_index()
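The header comment above suggests trimming optional fields by trial and error. Here is a minimal sketch of how that probing could be automated; the helper name and the assumption that an HTTP 200 means the dropped field was optional are mine, not the original author's.

def probe_headers(url, data, full_header):
    # Hypothetical helper: drop one header field at a time and re-send the
    # request; fields whose removal still yields HTTP 200 are candidates for
    # deletion. Verify against the live API before trusting the result.
    removable = []
    for key in list(full_header):
        trimmed = {k: v for k, v in full_header.items() if k != key}
        try:
            resp = requests.post(url=url, headers=trimmed, data=data, timeout=10)
            if resp.status_code == 200:
                removable.append(key)
        except requests.RequestException:
            pass  # a network error suggests the field is probably required
    return removable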
Crawling the details: drill down from each category to the items inside
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400"
                }
                # print(data_2)
                queue_list.put(data_2)

handle_index()
print(queue_list.qsize())
Detail info inside each category's recipes
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    # "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400",
                    "order": "0"
                }
                # print(data_2)
                queue_list.put(data_2)

def handle_caipu_list(data):
    print("Current ingredient:", data["keyword"])
    caipu_list_url = ""  # recipe-list API URL, omitted in the original post
    caipu_response = handle_request(caipu_list_url, data)
    caipu_response_dict = json.loads(caipu_response.text)
    for caipu_item in caipu_response_dict["result"]["list"]:
        caipu_info = {}
        caipu_info["shicai"] = data["keyword"]
        if caipu_item["type"] == 13:
            caipu_info["user_name"] = caipu_item["r"]["an"]
            caipu_info["shicai_id"] = caipu_item["r"]["id"]
            caipu_info["describe"] = caipu_item["r"]["cookstory"].replace("\n", "").replace(" ", "")
            caipu_info["caipu_name"] = caipu_item["r"]["n"]
            caipu_info["zuoliao_list"] = caipu_item["r"]["major"]
            print(caipu_info)
        else:
            continue

handle_index()
handle_caipu_list(queue_list.get())
Detail info inside each dish
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    # "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400",
                    "order": "0"
                }
                # print(data_2)
                queue_list.put(data_2)

def handle_caipu_list(data):
    print("Current ingredient:", data["keyword"])
    caipu_list_url = ""  # recipe-list API URL, omitted in the original post
    caipu_response = handle_request(caipu_list_url, data)
    caipu_response_dict = json.loads(caipu_response.text)
    for caipu_item in caipu_response_dict["result"]["list"]:
        caipu_info = {}
        caipu_info["shicai"] = data["keyword"]
        if caipu_item["type"] == 13:
            caipu_info["user_name"] = caipu_item["r"]["an"]
            caipu_info["shicai_id"] = caipu_item["r"]["id"]
            caipu_info["describe"] = caipu_item["r"]["cookstory"].replace("\n", "").replace(" ", "")
            caipu_info["caipu_name"] = caipu_item["r"]["n"]
            caipu_info["zuoliao_list"] = caipu_item["r"]["major"]
            # print(caipu_info)
            detail_url = "" + str(caipu_info["shicai_id"])  # detail API base URL, omitted in the original post
            detail_data = {
                "client": "4",
                "_session": "1547000257341354730010002552",
                "author_id": "0",
                "_vs": "2803",
                "ext": '{"query": {"kw": "' + data["keyword"] + '", "src": "2803", "idx": "1", "type": "13", "id": ' + str(caipu_info["shicai_id"]) + '}}'
            }
            detail_response = handle_request(detail_url, detail_data)
            detail_response_dic = json.loads(detail_response.text)
            caipu_info["tips"] = detail_response_dic["result"]["recipe"]["tips"]
            caipu_info["cookstep"] = detail_response_dic["result"]["recipe"]["cookstep"]
            print(json.dumps(caipu_info))
        else:
            continue

handle_index()
handle_caipu_list(queue_list.get())
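One fragile spot above: ext is itself a JSON string, and building it by string concatenation breaks as soon as a keyword contains a quote character. A sketch of a safer construction with json.dumps, using the same keys as the code above (the helper name is mine):

import json

def build_ext(keyword, shicai_id):
    # Serialize the query object instead of concatenating strings, so quotes
    # and non-ASCII characters inside keyword are escaped correctly.
    return json.dumps({"query": {"kw": keyword, "src": "2803", "idx": "1",
                                 "type": "13", "id": shicai_id}})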
Storing the data in MongoDB
- Bring up the virtual machine with Vagrant
vagrant up
- Enter the virtual machine
ip 192.168.66.100
su -
# password: vagrant
docker
- Pull the MongoDB image
Default port: 27017
docker pull bitnami/mongodb:latest
- Create the MongoDB container (a quick connectivity check follows below)
mkdir bitnami
cd bitnami
mkdir mongodb
docker run -d -v /path/to/mongodb-persistence:/root/bitnami -p 27017:27017 bitnami/mongodb:latest
# stop the firewall so the port is reachable from the host
systemctl stop firewalld
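Before wiring the spider to the database, it is worth confirming that the container is actually reachable from the host. A minimal connectivity check with pymongo, assuming the VM address and port from the steps above:

import pymongo

# 192.168.66.100:27017 is the VM address and port from the setup above.
client = pymongo.MongoClient(host="192.168.66.100", port=27017,
                             serverSelectionTimeoutMS=3000)
try:
    client.admin.command("ping")  # raises if the server cannot be reached
    print("MongoDB is up")
except pymongo.errors.PyMongoError as e:
    print("Cannot reach MongoDB:", e)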
Connect with a third-party client
The helper module for connecting to MongoDB:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/11 0:53
# @Author  : liming
# @Site    :
# @File    : handle_mongo.py
# @url     : idig8.com
# @Software: PyCharm
import pymongo
from pymongo.collection import Collection

class Connect_mongo(object):
    def __init__(self):
        self.client = pymongo.MongoClient(host="192.168.66.100", port=27017)
        self.db_data = self.client["dou_guo_mei_shi"]

    def insert_item(self, item):
        db_collection = Collection(self.db_data, 'dou_guo_mei_shi_item')
        db_collection.insert_one(item)

# expose a single shared instance
mongo_info = Connect_mongo()
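Usage is a single call, and every script that imports mongo_info shares one client. A quick smoke test (the sample document is made up for illustration):

from handle_mongo import mongo_info

# Insert a throwaway document, then read it back to confirm the round trip.
mongo_info.insert_item({"shicai": "test", "caipu_name": "demo"})
print(mongo_info.db_data["dou_guo_mei_shi_item"].find_one({"shicai": "test"}))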
The data scraped by Python is saved through the Mongo helper into the MongoDB container running in the CentOS 7 VM:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue
from handle_mongo import mongo_info

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    # "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400",
                    "order": "0"
                }
                # print(data_2)
                queue_list.put(data_2)

def handle_caipu_list(data):
    print("Current ingredient:", data["keyword"])
    caipu_list_url = ""  # recipe-list API URL, omitted in the original post
    caipu_response = handle_request(caipu_list_url, data)
    caipu_response_dict = json.loads(caipu_response.text)
    for caipu_item in caipu_response_dict["result"]["list"]:
        caipu_info = {}
        caipu_info["shicai"] = data["keyword"]
        if caipu_item["type"] == 13:
            caipu_info["user_name"] = caipu_item["r"]["an"]
            caipu_info["shicai_id"] = caipu_item["r"]["id"]
            caipu_info["describe"] = caipu_item["r"]["cookstory"].replace("\n", "").replace(" ", "")
            caipu_info["caipu_name"] = caipu_item["r"]["n"]
            caipu_info["zuoliao_list"] = caipu_item["r"]["major"]
            # print(caipu_info)
            detail_url = "" + str(caipu_info["shicai_id"])  # detail API base URL, omitted in the original post
            detail_data = {
                "client": "4",
                "_session": "1547000257341354730010002552",
                "author_id": "0",
                "_vs": "2803",
                "ext": '{"query": {"kw": "' + data["keyword"] + '", "src": "2803", "idx": "1", "type": "13", "id": ' + str(caipu_info["shicai_id"]) + '}}'
            }
            detail_response = handle_request(detail_url, detail_data)
            detail_response_dic = json.loads(detail_response.text)
            caipu_info["tips"] = detail_response_dic["result"]["recipe"]["tips"]
            caipu_info["cookstep"] = detail_response_dic["result"]["recipe"]["cookstep"]
            # print(json.dumps(caipu_info))
            mongo_info.insert_item(caipu_info)
        else:
            continue

handle_index()
handle_caipu_list(queue_list.get())
Crawling with a Python thread pool
- In Python 3, the thread pool comes from concurrent.futures import ThreadPoolExecutor
Using the thread pool:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue
from handle_mongo import mongo_info
from concurrent.futures import ThreadPoolExecutor

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    response = requests.post(url=url, headers=header, data=data)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    # "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400",
                    "order": "0"
                }
                # print(data_2)
                queue_list.put(data_2)

def handle_caipu_list(data):
    print("Current ingredient:", data["keyword"])
    caipu_list_url = ""  # recipe-list API URL, omitted in the original post
    caipu_response = handle_request(caipu_list_url, data)
    caipu_response_dict = json.loads(caipu_response.text)
    for caipu_item in caipu_response_dict["result"]["list"]:
        caipu_info = {}
        caipu_info["shicai"] = data["keyword"]
        if caipu_item["type"] == 13:
            caipu_info["user_name"] = caipu_item["r"]["an"]
            caipu_info["shicai_id"] = caipu_item["r"]["id"]
            caipu_info["describe"] = caipu_item["r"]["cookstory"].replace("\n", "").replace(" ", "")
            caipu_info["caipu_name"] = caipu_item["r"]["n"]
            caipu_info["zuoliao_list"] = caipu_item["r"]["major"]
            # print(caipu_info)
            detail_url = "" + str(caipu_info["shicai_id"])  # detail API base URL, omitted in the original post
            detail_data = {
                "client": "4",
                "_session": "1547000257341354730010002552",
                "author_id": "0",
                "_vs": "2803",
                "ext": '{"query": {"kw": "' + data["keyword"] + '", "src": "2803", "idx": "1", "type": "13", "id": ' + str(caipu_info["shicai_id"]) + '}}'
            }
            detail_response = handle_request(detail_url, detail_data)
            detail_response_dic = json.loads(detail_response.text)
            caipu_info["tips"] = detail_response_dic["result"]["recipe"]["tips"]
            caipu_info["cookstep"] = detail_response_dic["result"]["recipe"]["cookstep"]
            # print(json.dumps(caipu_info))
            mongo_info.insert_item(caipu_info)
        else:
            continue

handle_index()
pool = ThreadPoolExecutor(max_workers=20)
while queue_list.qsize() > 0:
    pool.submit(handle_caipu_list, queue_list.get())
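One caveat about the loop above: submit returns immediately, so the main script can exit before the workers finish, and any exception raised inside handle_caipu_list disappears silently. A sketch of a variant that waits for completion and surfaces errors (same functions as above; the structure is my suggestion, not the original author's):

from concurrent.futures import ThreadPoolExecutor, as_completed

futures = []
with ThreadPoolExecutor(max_workers=20) as pool:
    while queue_list.qsize() > 0:
        futures.append(pool.submit(handle_caipu_list, queue_list.get()))
# The with-block waits for all workers; now report any failures.
for f in as_completed(futures):
    exc = f.exception()  # returns the raised exception, or None on success
    if exc is not None:
        print("worker failed:", exc)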
Hiding the crawler behind a proxy IP
Once the app's ops team notices our IP constantly hitting their servers, they may well ban it. Going through a proxy IP hides us.
- Register and apply for a proxy
It costs 1 yuan per hour; I bought an hour so we can try it out together.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/11 2:40
# @Author  : Aries
# @Site    :
# @File    : handle_proxy.py
# @Software: PyCharm
# 60.17.177.187 is the IP the proxy presents
import requests

url = ''  # IP-echo test URL, omitted in the original post
proxy = {'http': ''}  # proxy endpoint, omitted in the original post
response = requests.get(url=url, proxies=proxy)
print(response.text)
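The proxy endpoint is left blank above. For reference, requests expects proxy URLs of the form scheme://user:pass@host:port; the credentials and port below are hypothetical placeholders, and catching ProxyError is worthwhile because hourly proxies expire:

import requests

# Hypothetical placeholder endpoint; substitute the one from your provider.
proxy = {"http": "http://user:pass@60.17.177.187:9999"}
try:
    # httpbin.org/ip echoes the caller's IP, so the response should show
    # the proxy's address rather than your own.
    response = requests.get("http://httpbin.org/ip", proxies=proxy, timeout=10)
    print(response.text)
except requests.exceptions.ProxyError as e:
    print("proxy rejected or expired:", e)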
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/1/9 11:06
# @Author  : lm
# @Url     : idig8.com
# @Site    :
# @File    : spider_douguomeishi.py
# @Software: PyCharm
import json
import requests
from multiprocessing import Queue
from handle_mongo import mongo_info
from concurrent.futures import ThreadPoolExecutor

# create the queue
queue_list = Queue()

def handle_request(url, data):
    header = {
        "client": "4",
        "version": "6916.2",
        "device": "SM-G955N",
        "sdk": "22,5.1.1",
        "imei": "354730010002552",
        "channel": "zhuzhan",
        "mac": "00:FF:E2:A2:7B:58",
        "resolution": "1440*900",
        "dpi": "2.0",
        "android-id": "bcdaf527105cc26f",
        "pseudo-id": "354730010002552",
        "brand": "samsung",
        "scale": "2.0",
        "timezone": "28800",
        "language": "zh",
        "cns": "3",
        "carrier": "Android",
        # "imsi": "310260000000000",
        "user-agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G955N Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36",
        "lon": "105.566938",
        "lat": "29.99831",
        "cid": "512000",
        "Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        # "Cookie": "duid=58349118",
        "Host": "api.douguo.net",
        # "Content-Length": "65"
    }
    proxy = {'http': ''}  # proxy endpoint, omitted in the original post
    response = requests.post(url=url, headers=header, data=data, proxies=proxy)
    return response

def handle_index():
    url = ""  # category API URL, omitted in the original post
    # client=4&_session=1547000257341354730010002552&v=1503650468&_vs=0
    data = {
        "client": "4",
        "_session": "1547000257341354730010002552",
        "v": "1503650468",
        "_vs": "0"
    }
    response = handle_request(url, data)
    # print(response.text)
    index_response_dic = json.loads(response.text)
    for item_index in index_response_dic["result"]["cs"]:
        # print(item_index)
        for item_index_cs in item_index["cs"]:
            # print(item_index_cs)
            for item in item_index_cs["cs"]:
                # print(item)
                data_2 = {
                    "client": "4",
                    # "_session": "1547000257341354730010002552",
                    "keyword": item["name"],
                    "_vs": "400",
                    "order": "0"
                }
                # print(data_2)
                queue_list.put(data_2)

def handle_caipu_list(data):
    print("Current ingredient:", data["keyword"])
    caipu_list_url = ""  # recipe-list API URL, omitted in the original post
    caipu_response = handle_request(caipu_list_url, data)
    caipu_response_dict = json.loads(caipu_response.text)
    for caipu_item in caipu_response_dict["result"]["list"]:
        caipu_info = {}
        caipu_info["shicai"] = data["keyword"]
        if caipu_item["type"] == 13:
            caipu_info["user_name"] = caipu_item["r"]["an"]
            caipu_info["shicai_id"] = caipu_item["r"]["id"]
            caipu_info["describe"] = caipu_item["r"]["cookstory"].replace("\n", "").replace(" ", "")
            caipu_info["caipu_name"] = caipu_item["r"]["n"]
            caipu_info["zuoliao_list"] = caipu_item["r"]["major"]
            # print(caipu_info)
            detail_url = "" + str(caipu_info["shicai_id"])  # detail API base URL, omitted in the original post
            detail_data = {
                "client": "4",
                "_session": "1547000257341354730010002552",
                "author_id": "0",
                "_vs": "2803",
                "ext": '{"query": {"kw": "' + data["keyword"] + '", "src": "2803", "idx": "1", "type": "13", "id": ' + str(caipu_info["shicai_id"]) + '}}'
            }
            detail_response = handle_request(detail_url, detail_data)
            detail_response_dic = json.loads(detail_response.text)
            caipu_info["tips"] = detail_response_dic["result"]["recipe"]["tips"]
            caipu_info["cookstep"] = detail_response_dic["result"]["recipe"]["cookstep"]
            # print(json.dumps(caipu_info))
            mongo_info.insert_item(caipu_info)
        else:
            continue

handle_index()
pool = ThreadPoolExecutor(max_workers=2)
while queue_list.qsize() > 0:
    pool.submit(handle_caipu_list, queue_list.get())
PS: This was an introduction to scraping app data. First, point the emulator's proxy at your own machine (with Fiddler installed) so Fiddler can capture the traffic. Finding the right URL in that capture takes experience, but once you have it, half the crawler is already written. Next, build the request headers from what Fiddler shows. There are many fields, so try deleting them down to a minimal set; that also helps evade anti-crawler checks, since some fields make you easier to spot as a crawler (cookies, for example), even though some crawlers do need cookies to fetch data. Then set up proxy IPs so the same address is not seen hitting one endpoint over and over. The queue exists purely to make hand-off to the thread pool convenient, and the results land in MongoDB. With that, the multi-threaded app crawler is complete.