H.264 RTSP Live Streaming with live555 (Windows Version)
Why label this the "Windows version"? Because firehood has already implemented a Linux version: "H.264 RTSP live streaming with live555".
Related articles:
【1】Building live555 with VS2013 (Visual Studio 2013) on Win7 (Windows 7)
【2】RTSP protocol analysis
【3】Windows named pipes
1. Basics
Learning live555 usually starts with the testOnDemandRTSPServer.cpp example in E:\live555\testProgs, which implements the simplest possible RTSP server. The "OnDemand" in the file name means the server acts only on request: it streams a file and pushes it to a client only when that client actively accesses the URL and sends the relevant commands. The example is based on RTP unicast; for background on unicast, see: Using jrtplib from Qt for unicast, multicast and broadcast.
testOnDemandRTSPServer.cpp shows the steps needed to build an RTSP server. Create a Win32 console project named h264LiveMediaServer, add a new file h264LiveMediaServer.cpp, copy the contents of testOnDemandRTSPServer.cpp into it, and then make a few small changes so that only the parts related to the H.264 session remain, as shown below:
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
UsageEnvironment* env;
// True: clients that connect later start playing from the point the first client has already reached
// False: every client plays the audio/video file from the beginning
Boolean reuseFirstSource = False;
// Prints information about the stream
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
char const* streamName, char const* inputFileName);
int main(int argc, char** argv)
{
//Create the task scheduler and initialize the usage environment
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
//Create the RTSP server and start listening for client connections
//Note that this is not the default port 554, so the port must be given explicitly in the URL
RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
char const* descriptionString
= "Session streamed by \"h264LiveMediaServer\"";
//Stream name (media name)
char const* streamName = "h264ESVideoTest";
//File name: when the client requests the stream named h264ESVideoTest, the file 480320.264 is actually opened.
//Note that when h264LiveMediaServer is run from the IDE, live555 serves the video/audio file in the project's working directory,
//i.e. the directory at the same level as *.vcxproj, so the file should be placed there. When h264LiveMediaServer.exe is launched by double-clicking it, the file naturally goes in the same directory as the .exe.
char const* inputFileName = "480320.264";
//When a client requests playback it supplies the stream name (streamName) to tell the RTSP server which stream it wants.
//Create the media session. The mapping between the stream name and the file name is established by adding a subsession; the media session manages session-related information such as the session description, duration and stream name.
//2nd argument: media name, 3rd: media info, 4th: media description
ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
//Add the H.264 subsession; the file name passed here is the file that is actually opened.
//H264VideoFileServerMediaSubsession derives from FileServerMediaSubsession, which derives from OnDemandServerMediaSubsession,
//and OnDemandServerMediaSubsession and PassiveServerMediaSubsession both derive from ServerMediaSubsession.
//Reading the file and related work are implemented in this class; to turn on-demand playback into live streaming, derive a new class from it and add the required methods.
sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, inputFileName, reuseFirstSource));
//Add the session to the RTSP server
rtspServer->addServerMediaSession(sms);
//Print the stream information to standard output
announceStream(rtspServer, sms, streamName, inputFileName);
//Try to set up an HTTP server for RTSP-over-HTTP tunneling.
if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080))
{
*env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
}
else
{
*env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
}
//Enter the event loop; socket read events and the delayed sending of media data are all handled inside this loop.
env->taskScheduler().doEventLoop();
return 0;
}
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
char const* streamName, char const* inputFileName) {
char* url = rtspServer->rtspURL(sms);
UsageEnvironment& env = rtspServer->envir();
env << "\n\"" << streamName << "\" stream, from the file \""
<< inputFileName << "\"\n";
env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;
}
For how to test, see 【1】; the test result is shown below:
2. Implementation
In the original article "H.264 RTSP live streaming with live555", the author used a FIFO queue, which on Linux is simply a named pipe. Windows has named pipes as well, so the corresponding flow on Windows is shown in the diagram below:
For details on Windows named pipes, see 【3】.
Here, however, a named pipe is not used; instead the local H.264 file is read directly, split into StartCode+NALU memory blocks, and copied into the live555 server. With this structure it is easy to switch to a named-pipe implementation later: the pipe client would simply read the local H.264 file, split it into StartCode (0x000001 or 0x00000001)+NALU blocks and write them into the pipe, while the pipe server side (inside the live555 server) would read the data from the pipe and copy it into the live555 server.
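As a rough illustration of that named-pipe variant (it is not part of this project's code), the following minimal sketch shows what the server-side read loop could look like with the Win32 named-pipe API; the pipe name \\.\pipe\h264Pipe and the buffer sizes are assumptions:
#include <windows.h>
#include <cstdio>

int main()
{
    // Create the named pipe (the name "\\.\pipe\h264Pipe" is only an example);
    // the pipe client would write StartCode+NALU blocks into it
    HANDLE hPipe = CreateNamedPipeA("\\\\.\\pipe\\h264Pipe",
                                    PIPE_ACCESS_INBOUND,
                                    PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT,
                                    1,          // a single instance
                                    0,          // output buffer size (unused, the server only reads)
                                    1024 * 100, // input buffer size (assumed)
                                    0, NULL);
    if (hPipe == INVALID_HANDLE_VALUE)
    {
        printf("CreateNamedPipe failed: %lu\n", GetLastError());
        return 1;
    }
    // Wait for the pipe client (the process that reads the local H264 file) to connect
    if (!ConnectNamedPipe(hPipe, NULL) && GetLastError() != ERROR_PIPE_CONNECTED)
    {
        printf("ConnectNamedPipe failed: %lu\n", GetLastError());
        CloseHandle(hPipe);
        return 1;
    }
    unsigned char buf[1024 * 100];
    DWORD bytesRead = 0;
    // Each successful ReadFile would deliver data to be copied into the live555 source
    // (for example into the inBuf/inLen used by H264LiveFramedSource below)
    while (ReadFile(hPipe, buf, sizeof(buf), &bytesRead, NULL) && bytesRead > 0)
    {
        // ...copy buf[0..bytesRead) into the live555 server here...
    }
    CloseHandle(hPipe);
    return 0;
}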
From the analysis in the Basics section it follows that implementing a custom server means replacing H264VideoFileServerMediaSubsession in sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, inputFileName, reuseFirstSource)) with our own subsession class. H264VideoFileServerMediaSubsession calls ByteStreamFileSource::createNew(envir(), fFileName) inside its createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) function, and the frames themselves are fetched in ByteStreamFileSource's doGetNextFrame() function. We therefore derive from H264VideoFileServerMediaSubsession and ByteStreamFileSource and override createNewStreamSource and doGetNextFrame respectively.
The code is shown below:
h264LiveFramedSource.hh
#ifndef _H264LIVEFRAMEDSOURCE_HH
#define _H264LIVEFRAMEDSOURCE_HH
#include <ByteStreamFileSource.hh>
class H264LiveFramedSource : public ByteStreamFileSource
{
public:
static H264LiveFramedSource* createNew(UsageEnvironment& env, unsigned preferredFrameSize = 0, unsigned playTimePerFrame = 0);
protected:
H264LiveFramedSource(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame);
~H264LiveFramedSource();
private:
//Override the base-class virtual function
virtual void doGetNextFrame();
};
#endif
h264LiveFramedSource.cpp
#include "h264LiveFramedSource.hh"
#include "GroupsockHelper.hh"
#include "spsdecode.h"
int findStartCode(unsigned char *buf, int zeros_in_startcode)
{
int info = 1;
int i;
for (i = 0; i < zeros_in_startcode; i++)
{
if (buf[i] != 0)
info = 0;
}
if (buf[i] != 1)
info = 0;
return info;
}
//The NALU returned here includes the StartCode
int getNextNalu(FILE* inpf, unsigned char* buf)
{
int pos = 0;
int startCodeFound = 0;
int info2 = 0;
int info3 = 0;
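// Copy leading bytes into buf until the first non-zero byte (the 0x01 of the start code)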
while (!feof(inpf) && (buf[pos++] = fgetc(inpf)) == 0);
while (!startCodeFound)
{
if (feof(inpf))
{
return pos - 1;
}
buf[pos++] = fgetc(inpf);
info3 = findStartCode(&buf[pos - 4], 3);
startCodeFound=(info3 == 1);
if (info3 != 1)
info2 = findStartCode(&buf[pos - 3], 2);
startCodeFound = (info2 == 1 || info3 == 1);
}
if (info2)
{
fseek(inpf, -3, SEEK_CUR);
return pos - 3;
}
if (info3)
{
fseek(inpf, -4, SEEK_CUR);
return pos - 4;
}
// Not reached for a well-formed stream; avoids a missing-return warning
return pos;
}
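// Shared state for the live source: input file, NALU buffer, current NALU length and frame rate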
FILE * inpf;
unsigned char* inBuf;
int inLen;
int nFrameRate;
H264LiveFramedSource::H264LiveFramedSource(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
: ByteStreamFileSource(env, 0, preferredFrameSize, playTimePerFrame)
{
const char* fname = "480320.264";
inpf = NULL;
inpf = fopen(fname, "rb");
inBuf = (unsigned char*)calloc(1024 * 100, sizeof(char));
inLen = 0;
inLen = getNextNalu(inpf, inBuf);
// Read the SPS (the first NALU in the file)
unsigned int nSpsLen = inLen - 4;
unsigned char *pSps = (unsigned char*)malloc(nSpsLen);
memcpy(pSps, inBuf + 4, nSpsLen);
// Decode the SPS to obtain the video width, height and frame rate
int width = 0, height = 0, fps = 0;
h264_decode_sps(pSps, nSpsLen, width, height, fps);
nFrameRate = 0;
if (fps)
nFrameRate = fps;
else
nFrameRate = 25;
}
H264LiveFramedSource* H264LiveFramedSource::createNew(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
{
H264LiveFramedSource* newSource = new H264LiveFramedSource(env, preferredFrameSize, playTimePerFrame);
return newSource;
}
H264LiveFramedSource::~H264LiveFramedSource()
{
free(inBuf);
fclose(inpf);
}
// This function is called when new frame data is available from the device.
// We deliver this data by copying it to the 'downstream' object, using the following parameters (class members):
// 'in' parameters (these should *not* be modified by this function):
// fTo: The frame data is copied to this address.
// (Note that the variable "fTo" is *not* modified. Instead,
// the frame data is copied to the address pointed to by "fTo".)
// fMaxSize: This is the maximum number of bytes that can be copied
// (If the actual frame is larger than this, then it should
// be truncated, and "fNumTruncatedBytes" set accordingly.)
// 'out' parameters (these are modified by this function):
// fFrameSize: Should be set to the delivered frame size (<= fMaxSize).
// fNumTruncatedBytes: Should be set iff the delivered frame would have been
// bigger than "fMaxSize", in which case it's set to the number of bytes
// that have been omitted.
// fPresentationTime: Should be set to the frame's presentation time
// (seconds, microseconds). This time must be aligned with 'wall-clock time' - i.e., the time that you would get
// by calling "gettimeofday()".
// fDurationInMicroseconds: Should be set to the frame's duration, if known.
// If, however, the device is a 'live source' (e.g., encoded from a camera or microphone), then we probably don't need
// to set this variable, because - in this case - data will never arrive 'early'.
void H264LiveFramedSource::doGetNextFrame()
{
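// Deliver the StartCode+NALU block that was read previously (in the constructor or the last call)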
fFrameSize = inLen;
if (fFrameSize > fMaxSize)
{
fNumTruncatedBytes = fFrameSize - fMaxSize;
fFrameSize = fMaxSize;
}
else
{
fNumTruncatedBytes = 0;
}
memmove(fTo, inBuf, fFrameSize);
inLen = 0;
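// Read the next StartCode+NALU block so it is ready for the next call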
inLen = getNextNalu(inpf, inBuf);
gettimeofday(&fPresentationTime, NULL); // presentation timestamp
fDurationInMicroseconds = 1000000 / nFrameRate; // controls the playback rate
//Schedule afterGetting to run after a delay of 0 seconds; calling afterGetting(this) directly would also work
nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
}
h264LiveVideoServerMediaSubssion.hh
#ifndef _H264LIVEVIDEOSERVERMEDIASUBSSION_HH
#define _H264LIVEVIDEOSERVERMEDIASUBSSION_HH
#include "H264VideoFileServerMediaSubsession.hh"
class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {
public:
static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment& env, Boolean reuseFirstSource);
protected:
H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource);
~H264LiveVideoServerMediaSubssion();
protected:
//Override the base-class virtual function
FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
};
#endif
h264LiveVideoServerMediaSubssion.cpp
#include "h264LiveVideoServerMediaSubssion.hh"
#include "h264LiveFramedSource.hh"
#include "H264VideoStreamFramer.hh"
H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env, Boolean reuseFirstSource)
{
return new H264LiveVideoServerMediaSubssion(env, reuseFirstSource);
}
H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource)
: H264VideoFileServerMediaSubsession(env, 0, reuseFirstSource)
{
}
H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}
FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
//Estimated bitrate (kbps); adjust this to match the actual stream
estBitrate = 1000; // kbps
//Create the live video source
H264LiveFramedSource* liveSource = H264LiveFramedSource::createNew(envir());
if (liveSource == NULL)
{
return NULL;
}
//Create a framer for the video elementary stream
return H264VideoStreamFramer::createNew(envir(), liveSource);
}
h264LiveMediaServer.cpp also needs to be modified accordingly:
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "h264LiveVideoServerMediaSubssion.hh"
UsageEnvironment* env;
// True: clients that connect later start playing from the point the first client has already reached
// False: every client plays the audio/video file from the beginning
Boolean reuseFirstSource = True;
//Prints information about the stream
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, char const* streamName);
int main(int argc, char** argv)
{
//Create the task scheduler and initialize the usage environment
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
//Create the RTSP server and start listening for client connections
//Note that this is not the default port 554, so the port must be given explicitly in the URL
RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
char const* descriptionString = "Session streamed by \"h264LiveMediaServer\"";
//Stream name (media name)
char const* streamName = "h264ESVideoTest";
//When a client requests playback it supplies the stream name (streamName) to tell the RTSP server which stream it wants.
//Create the media session. The mapping between the stream name and the data source is established by adding a subsession; the media session manages session-related information such as the session description, duration and stream name.
//2nd argument: media name, 3rd: media info, 4th: media description
ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
//Use our own H264LiveVideoServerMediaSubssion instead of H264VideoFileServerMediaSubsession
sms->addSubsession(H264LiveVideoServerMediaSubssion::createNew(*env, reuseFirstSource));
//Add the session to the RTSP server
rtspServer->addServerMediaSession(sms);
//Print the stream information to standard output
announceStream(rtspServer, sms, streamName);
//Enter the event loop; socket read events and the delayed sending of media data are all handled inside this loop.
env->taskScheduler().doEventLoop();
return 0;
}
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,char const* streamName)
{
char* url = rtspServer->rtspURL(sms);
UsageEnvironment& env = rtspServer->envir();
env << "\n\"" << streamName << "\" stream\"\n";
env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;
}
For spsdecode.h, see: Decoding the H.264 SPS to obtain the resolution and frame rate.
3. Testing
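Testing is the same as in the Basics section (see 【1】). For example, assuming the server runs on the local machine with the port and stream name used in the code above, the stream should be playable in VLC, or with live555's openRTSP client, at rtsp://127.0.0.1:8554/h264ESVideoTest.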
Reference link: http://blog.csdn.net/firehood_/article/details/16844397