How to build FMS-based video communication on Android
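The listing below shows one way to do it: open the hardware Camera, receive each preview frame in onPreviewFrame() as an NV21 (YUV420SP) byte array, and publish video to Flash Media Server over RTMP with NetStream.publish(). The RTMP transport comes from the smaxe client library (AbstractCamera, NetStream); an x264 JNI wrapper (h264encoder) additionally encodes each frame to H.264 and dumps it to /sdcard for verification, while the packets actually published are built by the project's own RemoteUtil.encode(). The helper classes RTMPConnectionUtil, RemoteUtil and UltraNetStream are not included in the post; sketches of two of them follow the listing.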
package net.john.activity;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Map;
import net.john.R;
import net.john.util.RTMPConnectionUtil;
import net.john.util.RemoteUtil;
import net.john.util.UltraNetStream;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import com.shouyanwang.h264encoder;
import com.smaxe.io.ByteArray;
import com.smaxe.uv.client.INetStream;
import com.smaxe.uv.client.NetStream;
import com.smaxe.uv.client.camera.AbstractCamera;
import com.smaxe.uv.stream.support.MediaDataByteArray;
public class VideoActivity extends Activity{
final String TAG = "VideoActivity";
private boolean active;
public static AndroidCamera aCamera;
private h264encoder mH264encoder;
private long handle;
private Context context;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.layout_chat);
context=this;
mH264encoder = new h264encoder();
aCamera = new AndroidCamera(VideoActivity.this);
// Optional: pick the front-facing camera on Gingerbread (API 9) and newer.
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
//     for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
//         CameraInfo cameraInfo = new CameraInfo();
//         Camera.getCameraInfo(i, cameraInfo);
//         if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
//             // remember i and open that camera with Camera.open(i)
//         }
//     }
// }
// Connecting to the server and publishing is kicked off from
// surfaceChanged(), once the camera preview surface is ready.
active = true;
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
new AlertDialog.Builder(VideoActivity.this)
.setMessage(R.string.dialog_exit)
.setPositiveButton(R.string.dialog_ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
active = false;
finish();
}
})
.setNegativeButton(R.string.dialog_cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
}).show();
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}
@Override
public void onStop() {
super.onStop();
aCamera = null;
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}
Log.d("DEBUG", "onStop");
}
@Override
public void onDestroy() {
super.onDestroy();
Log.d("DEBUG", "onDestroy()");
}
// Camera wrapper: captures preview frames and feeds video data into the published stream.
public class AndroidCamera extends AbstractCamera implements SurfaceHolder.Callback, Camera.PreviewCallback {
private static final int PICK_FROM_CAMERA = 0;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private Camera camera;
private int width;
private int height;
private boolean init;
int blockWidth;
int blockHeight;
int timeBetweenFrames; // 1000 / frameRate
int frameCounter;
byte[] previous;
public AndroidCamera(Context context) {
surfaceView = (SurfaceView)((Activity) context).findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(AndroidCamera.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // required before Android 3.0, ignored on newer versions
width = 352;
height = 288;
handle = mH264encoder.initEncoder(width, height);
init = false;
Log.d("DEBUG", "AndroidCamera()");
}
private void startVideo() {
Log.d("DEBUG", "startVideo()");
// Create the publish stream on the existing server connection.
RTMPConnectionUtil.netStream = new UltraNetStream(RTMPConnectionUtil.connection);
RTMPConnectionUtil.netStream.addEventListener(new NetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source, final Map<String, Object> info){
Log.d("DEBUG", "Publisher#NetStream#onNetStatus: " + info);
final Object code = info.get("code");
if (NetStream.PUBLISH_START.equals(code)) {
if (VideoActivity.aCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(aCamera, -1 );
Log.d("DEBUG", "aCamera.start()");
aCamera.start();
} else {
Log.d("DEBUG", "camera == null");
}
}
}
});
// Publish under the stream name "aaa"; RECORD also saves the stream
// on the server (RTMP's "live" mode would publish without recording).
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
}
public void start() {
System.out.println("start");
camera.startPreview();
}
public void printHexString(byte[] b) {
System.out.println("printHexString");
for (int i = 0; i < b.length; i++) {
String hex = Integer.toHexString(b[i] & 0xFF);
if (hex.length() == 1) {
hex = '0' + hex;
}
Log.i(TAG, "陣列16進位制內容:"+hex.toUpperCase());
}
}
@Override
public void onPreviewFrame(byte[] arg0, Camera arg1) {
System.out.println("onPreviewFrame");
if (!active) return;
if (!init) {
blockWidth = 32;
blockHeight = 32;
timeBetweenFrames = 100; // 1000 / frameRate, i.e. 10 fps
frameCounter = 0;
previous = null;
init = true;
}
final long ctime = System.currentTimeMillis();
Log.i(TAG, "採集到的陣列的長度:"+arg0.length);
byte[] current = RemoteUtil.decodeYUV420SP2RGB(arg0, width, height);
try {
// The JNI H.264 output is only written to /sdcard for verification (see
// Decode() below); the packet actually published is built by RemoteUtil.encode().
int byte_result = Decode(arg0);
byte[] bytes1 = copyOf(out, byte_result);
Log.i(TAG, "encoded byte array length: " + bytes1.length);
final byte[] packet = RemoteUtil.encode(current, previous, blockWidth, blockHeight, width, height);
fireOnVideoData(new MediaDataByteArray(timeBetweenFrames, new ByteArray(packet)));
previous = current;
if (++frameCounter % 10 == 0) previous = null; // clear the reference frame every 10 frames so a full (key) frame is sent
}
catch (Exception e) {
e.printStackTrace();
}
final int spent = (int) (System.currentTimeMillis() - ctime);
try {
Log.i(TAG, "執行緒等待:"+Math.max(0, timeBetweenFrames - spent)+" s");
Thread.sleep(Math.max(0, timeBetweenFrames - spent));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// Equivalent to java.util.Arrays.copyOf(arr, len).
public byte[] copyOf(byte[] arr, int len) {
System.out.println("copyOf");
byte[] target = new byte[len];
System.arraycopy(arr, 0, target, 0, len);
return target;
}
private byte[] out = new byte[20*1024];
long start = 0;
long end = 0;
// Despite its name, this ENCODES one YUV frame to H.264 through the JNI
// wrapper and appends the result to /sdcard/x264_video_activity.264
// (writing there requires the WRITE_EXTERNAL_STORAGE permission).
private int Decode(byte[] yuvData){
start = System.currentTimeMillis();
int result = mH264encoder.encodeframe(handle, -1, yuvData, yuvData.length, out);
end = System.currentTimeMillis();
Log.e(TAG, "encode result:"+result+"--encode time:"+(end-start));
if(result > 0){
try {
FileOutputStream file_out = new FileOutputStream ("/sdcard/x264_video_activity.264",true);
file_out.write(out,0,result);
file_out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
// this.setPrewDataGetHandler();
return result;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
camera.startPreview();
// Connect to the server and publish off the UI thread now that the
// preview surface has its final size.
new Thread() {
@Override
public void run() {
startVideo();
}
}.start();
Log.d("DEBUG", "surfaceChanged()");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Alternative approach: launch the system camera app instead of streaming.
// Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
// intent.putExtra("camerasensortype", 1); // use the front camera
// intent.putExtra("autofocus", true);     // autofocus
// intent.putExtra("fullScreen", false);   // full screen
// intent.putExtra("showActionIcons", false);
// startActivityForResult(intent, PICK_FROM_CAMERA);
camera = Camera.open();
if(camera!=null){
try {
camera.setPreviewDisplay(surfaceHolder);
camera.setPreviewCallback(this);
Camera.Parameters params = camera.getParameters();
// 352x288 (CIF) must be among the device's supported preview sizes.
params.setPreviewSize(width, height);
camera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
camera.release();
camera = null;
}
}else{
System.out.println("no!");
}
Log.d("DEBUG", "surfaceCreated()");
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (camera != null) {
camera.setPreviewCallback(null); // stop callbacks before releasing the camera
camera.stopPreview();
camera.release();
camera = null;
}
mH264encoder.destory(handle);
Log.d("DEBUG", "surfaceDestroyed()");
}
} //AndroidCamera
}
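The post never shows RTMPConnectionUtil, which owns the NetConnection used above. Here is a minimal sketch of what it might look like with the same smaxe client library; the URL, method name connect(), and the status handling are assumptions, not the original code:

package net.john.util;

import java.util.Map;

import com.smaxe.uv.client.INetConnection;
import com.smaxe.uv.client.NetConnection;

// Hypothetical reconstruction; only the two fields referenced by
// VideoActivity (connection, netStream) are known from the post.
public class RTMPConnectionUtil {

    // Placeholder FMS address; replace with your server and application.
    public static final String RTMP_URL = "rtmp://192.168.1.100/live";

    public static NetConnection connection;
    public static UltraNetStream netStream;

    public static void connect() {
        connection = new NetConnection();
        connection.addEventListener(new NetConnection.ListenerAdapter() {
            @Override
            public void onNetStatus(INetConnection source, Map<String, Object> info) {
                // Wait for "NetConnection.Connect.Success" before publishing.
            }
        });
        connection.connect(RTMP_URL);
    }
}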
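The com.shouyanwang.h264encoder class is not shown either. From its call sites in the listing it is a thin JNI wrapper around x264; below is a sketch of the Java side consistent with those calls. The native library name and the meaning of the second encodeframe argument are assumptions:

package com.shouyanwang;

// Hypothetical Java side of the JNI wrapper, reconstructed from its call
// sites in VideoActivity; the real implementation lives in native code.
public class h264encoder {

    static {
        System.loadLibrary("h264encoder"); // assumed native library name
    }

    // Creates a native encoder for width x height frames; returns an opaque handle.
    public native long initEncoder(int width, int height);

    // Encodes one YUV frame into 'out' and returns the number of bytes
    // written (<= 0 on failure). The listing passes -1 as 'type'; its exact
    // meaning (likely frame-type / keyframe control) is not documented.
    public native int encodeframe(long handle, int type, byte[] in, int insize, byte[] out);

    // Releases the native encoder. (The spelling matches the original API.)
    public native void destory(long handle);
}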