iOS Tech Share | anyLive Open Source Project

Published by anyRTC on 2022-03-02

anyLive Overview

anyLive is anyRTC's open-source push/pull streaming project. It adopts a cross-platform architecture: one codebase supports Android, iOS, Windows, Mac, Ubuntu, and other platforms. This article focuses on the iOS implementation of anyLive.

Source Code Download

anylive_github

Development Environment

  • Development tool: Xcode 13 (run on a real device)

  • Development languages: Objective-C, Swift

  • Implements: push and pull streaming.

Platform Compatibility

System                 Build Environment      CPU Architecture
Android 4.4 and above  Android Studio, NDK    armeabi-v7a, arm64-v8a
iOS 9.0 and above      Xcode 13               arm64
Windows 7 and above    VS2015, VS2017         x86, x86-64

Project Structure

anyLive implements push streaming, pull streaming (playback), screen sharing, beauty filters, and other features.
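Across these features, the sample revolves around three core objects: ARLiveEngineKit (the engine), ARLivePusher (publishing), and ARLivePlayer (playback). Below is a minimal sketch of how they are created, using only the calls that appear later in this article; the module name in the import statement is an assumption, so adjust it to whatever your Podfile actually provides.

    import ARLiveKit  // assumed SDK module name; adjust to match your project setup

    // Create the engine once, then derive a pusher (publish side) and a player (playback side) from it.
    let liveEngine = ARLiveEngineKit(delegate: nil)
    let livePusher = liveEngine.createArLivePusher()   // push streaming
    let livePlayer = liveEngine.createArLivePlayer()   // pull streaming (playback)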

anylive_all

Sample Code

Demo

anylive_main

Code Implementation



   var menus = [
       [MenuItem(imageName: "icon_push", title: "直播推流", subTitle: "採用WebRTC底層架構,支援RTMP/HLS/HTTP-FLV")],
       [MenuItem(imageName: "icon_pull", title: "直播拉流(播放)", subTitle: "低功直播播放器,支援軟硬解切換,橫豎切換、低延遲等")],
       [MenuItem(imageName: "icon_video", title: "小視訊播放", subTitle: "支援首屏秒開、清晰度無縫切換、位元速率自適應等多種特性")]
   ]

   let identifier = "ARLiveMainCell"
   lazy var identifierArr: [String] = {
       ["Live_JoinVC", "Player_JoinVC", "Video_JoinVC"]
   }()

   override func viewDidLoad() {
       super.viewDidLoad()

       // Uncomment the following line to preserve selection between presentations
       // self.clearsSelectionOnViewWillAppear = false

       // Uncomment the following line to display an Edit button in the navigation bar for this view controller.
       // self.navigationItem.rightBarButtonItem = self.editButtonItem
       let label = UILabel(frame: .zero)

       label.textColor = UIColor(hexString: "#C4C4CE")
       label.font = UIFont(name: PingFang, size: 12)
       label.textAlignment = .center
       label.text = "Power by anyRTC"
       view.addSubview(label)

       liveEngine = ARLiveEngineKit(delegate: nil)
   }

   override func viewWillAppear(_ animated: Bool) {
       super.viewWillAppear(animated)
       navigationController?.setNavigationBarHidden(true, animated: true)
   }

   // MARK: - Table view data source

   override func numberOfSections(in tableView: UITableView) -> Int {
       return menus.count
   }

   override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
       // #warning Incomplete implementation, return the number of rows
       return menus[section].count
   }

   override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
       let cell: ARMainCell = tableView.dequeueReusableCell(withIdentifier: identifier, for: indexPath) as! ARMainCell

       // Configure the cell...
       let menuItem = menus[indexPath.section][indexPath.row]
       cell.mainImageView.image = UIImage(named: menuItem.imageName)
       cell.mainLabel.text = menuItem.title
       cell.subLabel.text = menuItem.subTitle
       cell.expectedImageView.isHidden = (indexPath.section != 2)
       return cell
   }

   override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
       if indexPath.section != 2 {
           guard let vc = storyboard?.instantiateViewController(withIdentifier: identifierArr[indexPath.section]) else { return }
           navigationController?.pushViewController(vc, animated: true)
       } else {
           ARToast.showText(text: "Coming soon!", duration: 1.0)
       }
   }

Demo (Push Streaming)

anylive_live

Code Implementation
    func initializePusher() {

       /// Instantiate the pusher object
       livePusher = liveEngine!.createArLivePusher()
       livePusher.setDelegate(self)
       
       /// Set the video encoder parameters for the push stream
       let param = ARLiveVideoEncoderParam(resolution!)
       livePusher.setVideoQuality(param)
       
       livePusher.startCamera(true)
       livePusher.startMicrophone()
       
       /// Set the local camera preview view
       livePusher.setupCameraRender(renderView)
       livePusher.setRenderFill(.fill)
       
       /// Start pushing the stream
       livePusher.startPush(pushUrl)
   }
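In the demo, pushUrl and resolution are properties of the hosting view controller and are assigned before initializePusher() is called. A hypothetical call site is sketched below; the RTMP address is purely illustrative, not a real endpoint.

       // Hypothetical usage: configure the ingest address, then start the pusher.
       pushUrl = "rtmp://push.example.com/live/room01"   // illustrative URL only; use your own ingest address
       initializePusher()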
   
   // MARK: - ARLivePushDelegate

extension ArLiveViewController: ARLivePushDelegate {
   func onError(_ code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
       /// Pusher error notification; invoked when the pusher encounters an error
       Logger.log(message: "onError \(code.rawValue)", level: .error)
   }
   
   func onWarning(_ code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
       /// Pusher warning notification
       Logger.log(message: "onWarning \(code.rawValue)", level: .warning)
   }
   
   func onCaptureFirstAudioFrame() {
       /// Callback when the first audio frame has been captured
       Logger.log(message: "onCaptureFirstAudioFrame", level: .info)
   }
   
   func onCaptureFirstVideoFrame() {
       /// Callback when the first video frame has been captured
       Logger.log(message: "onCaptureFirstVideoFrame", level: .info)
   }
   
   func onMicrophoneVolumeUpdate(_ volume: Int) {
       /// Microphone capture volume callback
       Logger.log(message: "onMicrophoneVolumeUpdate volume = \(volume)", level: .info)
   }
   
   func onPushStatusUpdate(_ status: ARLivePushStatus, message msg: String?, extraInfo: [AnyHashable: Any]?) {
       /// Pusher connection status callback
       Logger.log(message: "onPushStatusUpdate status = \(status.rawValue)", level: .info)
       stateLabel.text = "\(status.description)"
   }
   
   func onStatisticsUpdate(_ statistics: ARLivePusherStatistics) {
       /// Pusher statistics callback
       // Logger.log(message: "onStatisticsUpdate width = \(statistics.width), height = \(statistics.height), fps = \(statistics.fps), videoBitrate = \(statistics.videoBitrate), audioBitrate = \(statistics.audioBitrate)", level: .info)
   }
   
   func onSnapshotComplete(_ image: UIImage) {
       /// Snapshot callback
       Logger.log(message: "onSnapshotComplete", level: .info)
   }
}
Demo (Pull Streaming)

arlive_pull

Code Implementation
    func initializePlayer() {

       /// Create the player (pull-stream) instance
       livePlayer = liveEngine!.createArLivePlayer()
       livePlayer.setDelegate(self)
       
       /// Set the player's video render view
       livePlayer.setRenderView(renderView)
       livePlayer.setRenderFill(renderMode)
       
       /// Set the minimum and maximum buffer times the player auto-adjusts between (in seconds)
       livePlayer.setCacheParams(1.0, maxTime: 100)
       
       /// Start playing the audio/video stream
       livePlayer.startPlay(pullUrl)
   }
   
   // MARK: - ARLivePlayDelegate

extension ArPlayerViewController: ARLivePlayDelegate {
   func onError(_ player: ARLivePlayer, code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
       /// Player error notification; invoked when the player encounters an error
       Logger.log(message: "onError code = \(code.rawValue)", level: .info)
   }
   
   func onWarning(_ player: ARLivePlayer, code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
       /// Player warning notification
       Logger.log(message: "onWarning code = \(code.rawValue)", level: .info)
   }
   
   func onVideoPlayStatusUpdate(_ player: ARLivePlayer, status: ARLivePlayStatus, reason: ARLiveStatusChangeReason, extraInfo: [AnyHashable: Any]?) {
       /// Player video status change notification
       Logger.log(message: "onVideoPlayStatusUpdate status = \(status.rawValue), reason = \(reason.rawValue)", level: .info)
       liveStatus = status
       stateLabel.text = "\(status.description)"
   }
   
   func onAudioPlayStatusUpdate(_ player: ARLivePlayer, status: ARLivePlayStatus, reason: ARLiveStatusChangeReason, extraInfo: [AnyHashable: Any]?) {
       /// Player audio status change notification
       Logger.log(message: "onAudioPlayStatusUpdate status = \(status.rawValue) reason = \(reason.rawValue)", level: .info)
   }
   
   func onPlayoutVolumeUpdate(_ player: ARLivePlayer, volume: Int) {
       /// Playout volume callback
       Logger.log(message: "onPlayoutVolumeUpdate volume = \(volume)", level: .info)
   }
   
   func onStatisticsUpdate(_ player: ARLivePlayer, statistics: ARLivePlayerStatistics?) {
       /// Player statistics callback
       if let statistics = statistics {
           Logger.log(message: "onStatisticsUpdate width = \(statistics.width), height = \(statistics.height), fps = \(statistics.fps), videoBitrate = \(statistics.videoBitrate), audioBitrate = \(statistics.audioBitrate)", level: .info)
       }
   }
   
   func onSnapshotComplete(_ player: ARLivePlayer, image: UIImage) {
       /// Snapshot callback
       UIImageWriteToSavedPhotosAlbum(image, self, #selector(saveImage(image:didFinishSavingWithError:contextInfo:)), nil)
       
       NSObject.cancelPreviousPerformRequests(withTarget: self, selector: #selector(removeSnapshot), object: nil)
       snapImageView.image = image
       
       let imageWidth = image.size.width/2
       let imageHeight = image.size.height/2
       snapImageView.frame = CGRect(x: ARScreenWidth - imageWidth - 24, y: 150, width: imageWidth, height: imageHeight)
       view.addSubview(snapImageView)
       perform(#selector(removeSnapshot), with: nil, afterDelay: 2)
       
       Logger.log(message: "onSnapshotComplete sucess, imageWidth = \(image.size.width), imageHeight = \(image.size.height)", level: .info)
   }
   
   func onRenderVideoFrame(_ player: ARLivePlayer, frame videoFrame: ARLiveVideoFrame?) {
       /// Custom video rendering callback
       Logger.log(message: "onRenderVideoFrame", level: .info)
   }
   
   func onReceiveSeiMessage(_ player: ARLivePlayer, payloadType: Int32, data: Data?) {
       /// Callback when an SEI message is received
       Logger.log(message: "onReceiveSeiMessage payloadType = \(payloadType)", level: .info)
   }
}
Demo (Screen Sharing)

arlive_screen

Code Implementation
    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {

       DispatchQueue.main.async {
           switch sampleBufferType {
           case RPSampleBufferType.video:
               // Handle video sample buffer
               ARUploader.sendVideoBuffer(sampleBuffer)
           case RPSampleBufferType.audioApp:
               // Handle audio sample buffer for app audio
               ARUploader.sendAudioAppBuffer(sampleBuffer)
           case RPSampleBufferType.audioMic:
               // Handle audio sample buffer for mic audio
               ARUploader.sendAudioMicBuffer(sampleBuffer)
           @unknown default:
               // Handle other sample buffer types
               fatalError("Unknown type of sample buffer")
           }
       }
   }

   private static let liverPusher: ARLivePusher = {
       let livePusher = liveEngine.createArLivePusher()
       
       let screenSize = UIScreen.main.currentMode?.size
       let screenWidth = screenSize?.width
       let screenHeight = screenSize?.height
       /// Set the video encoder parameters for the push stream
       let videoParam = ARLiveVideoEncoderParam()
       videoParam.videoResolution = .resolution640x480
       videoParam.videoResolutionMode = .portrait
       videoParam.videoScaleMode = .fit
       livePusher.setVideoQuality(videoParam)
       livePusher.startMicrophone()
       
       /// Enable custom (external) audio and video capture
       livePusher.enableCustomAudioCapture(true)
       livePusher.enableCustomVideoCapture(true)
       /// Start pushing the stream (supply your own push URL here)
       livePusher.startPush(<#T##String#>)
       return livePusher
   }()
   
   static func sendAudioAppBuffer(_ sampleBuffer: CMSampleBuffer) {
       ARAudioTube.liverPusher(liverPusher, pushAudioCMSampleBuffer: sampleBuffer, resampleRate: audioSampleRate, type: .app)
   }
   
   static func sendAudioMicBuffer(_ sampleBuffer: CMSampleBuffer) {
       ARAudioTube.liverPusher(liverPusher, pushAudioCMSampleBuffer: sampleBuffer, resampleRate: audioSampleRate, type: .mic)
   }
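The processSampleBuffer code above runs inside a ReplayKit Broadcast Upload Extension, so the host app still needs a way for the user to start the broadcast. Below is a minimal sketch using Apple's RPSystemBroadcastPickerView from a view controller in the host app; the preferredExtension bundle identifier is hypothetical, so replace it with your extension's actual identifier.

    import ReplayKit

    // Add the system broadcast picker; tapping it lets the user start the upload extension.
    let picker = RPSystemBroadcastPickerView(frame: CGRect(x: 0, y: 0, width: 60, height: 60))
    picker.preferredExtension = "io.anyrtc.anylive.ScreenShare"  // hypothetical extension bundle ID
    picker.showsMicrophoneButton = true
    view.addSubview(picker)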

Closing Remarks

Finally, because time was limited, the project still contains some bugs and unfinished features. It is provided for reference only; everyone is welcome to fork it, and please report any shortcomings via issues. The GitHub download link is posted once more below.

GitHub open-source download address

If you find it useful, please give it a star~

From the "ITPUB blog"; link: http://blog.itpub.net/70013909/viewspace-2861826/. Please credit the source when reposting; otherwise legal liability may be pursued.
