Web Crawler: Targeted Stock Scraping

Posted by Mr.chris on 2018-12-06

This post walks through a targeted stock crawl: first collect every stock code from the Eastmoney (東方財富網) listing page, then open each stock's page on Baidu Gupiao (百度股票) and extract the fields we want to keep. The technical stack is re + bs4 + requests.
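
Before diving into the code, it helps to see how the two sites fit together. The sketch below is illustrative only: sh600000 stands in for whatever code the crawler actually scrapes, and the URLs are the ones used in this post (they date from 2018 and may no longer be live).

# Illustration only: how a scraped code turns into the page the crawler opens.
code = 'sh600000'                                  # matched on http://quote.eastmoney.com/stocklist.html
url = 'https://gupiao.baidu.com/stock/' + code + '.html'
# -> https://gupiao.baidu.com/stock/sh600000.html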

import requests
from bs4 import BeautifulSoup
import traceback     # used to print exception tracebacks for debugging
import re


# Fetch a page and return its decoded text
def getHTMLText(url, code='utf-8'):    # the encoding was checked on the target page beforehand
    try:
        r=requests.get(url)
        r.raise_for_status()
        r.encoding = code
        return r.text
    except:
        return ""
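
# Alternative sketch (not in the original): let requests guess the charset via
# r.apparent_encoding instead of passing one in. Detection is slower, which is why the
# code above hard-codes the encodings it already checked by hand. getHTMLTextAuto is a
# hypothetical name used only for this illustration.
def getHTMLTextAuto(url):
    try:
        r = requests.get(url)
        r.raise_for_status()
        r.encoding = r.apparent_encoding   # charset guessed from the response body
        return r.text
    except:
        return ""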

# Collect every stock code from the listing page into lst
def getStockList(lst, stockURL):
    html = getHTMLText(stockURL, 'GB2312')
    soup = BeautifulSoup(html, 'html.parser')
    a = soup.find_all('a')
    for i in a:
        try:
            href = i.attrs['href']
            lst.append(re.findall(r"[s][hz]\d{6}", href)[0])
        except:
            continue
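
# Sanity check of the pattern used above (the href below is illustrative, not scraped):
#   re.findall(r"[s][hz]\d{6}", "http://quote.eastmoney.com/sh600000.html")
#   -> ['sh600000']
# i.e. only Shanghai ('sh') / Shenzhen ('sz') prefixes followed by six digits are kept,
# which is exactly the code format the Baidu Gupiao URLs expect.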


# Fetch each stock's page, save the extracted fields to a file, and show the crawling progress
def getStockInfo(lst, stockURL, fpath):
    count = 0
    for stock in lst:
        url = stockURL + stock + '.html'
        html = getHTMLText(url)
        try:
            if html == "":
                continue
            infoDict = {}
            soup = BeautifulSoup(html, 'html.parser')
            stockInfo = soup.find('div', attrs={'class': 'stock-bets'})
            name = stockInfo.find_all(attrs={'class': 'bets-name'})[0]
            infoDict.update({'Stock Name': name.text.split()[0]})

            # field labels live in <dt> tags, the matching values in <dd> tags
            keyList = stockInfo.find_all('dt')
            valueList = stockInfo.find_all('dd')
            for i in range(len(keyList)):
                key = keyList[i].text
                val = valueList[i].text
                infoDict[key] = val

            with open(fpath, 'a', encoding='utf-8') as f:
                f.write(str(infoDict) + '\n')
            count = count + 1
            print("\rProgress: {:.2f}%".format(count * 100 / len(lst)), end="")
        except:
            count = count + 1
            print("\rProgress: {:.2f}%".format(count * 100 / len(lst)), end="")
            traceback.print_exc()   # puts the imported traceback module to use
            continue
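
# Note (alternative, not in the original): the index-based dt/dd pairing above assumes both
# lists have the same length. zip() expresses the same idea and simply stops at the shorter
# list if the page layout ever changes:
#
#     for key_tag, val_tag in zip(keyList, valueList):
#         infoDict[key_tag.text] = val_tag.text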



# Main: build the stock-code list, then crawl and save each stock's details
def main():
    stock_list_url = 'http://quote.eastmoney.com/stocklist.html'
    stock_info_url = 'https://gupiao.baidu.com/stock/'
    output_file = 'D:/BaiduStockInfo.txt'
    slist = []
    getStockList(slist,stock_list_url)
    getStockInfo(slist,stock_info_url,output_file)

main()

 
