NDK交叉編譯ffmpeg
第一步:下載NDK
第二步:下載FFmpeg的原始碼,git clone https://github.com/FFmpeg/FFmpeg.git
第三步:編寫shell指令碼,編譯FFmpeg成.so庫
#!/bin/bash
# Cross-compile FFmpeg into Android .so libraries with an NDK toolchain.
# Edit NDK below to point at your local android-ndk-r13b install before running.
echo "進入編譯ffmpeg指令碼"
NDK=/Users/zpw/Library/android-ndk-r13b
# Android 5.0 sysroot (API level 21, 32-bit ARM)
PLATFORM=$NDK/platforms/android-21/arch-arm
# Prebuilt GCC 4.9 cross toolchain (darwin-x86_64 host — macOS build machine)
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64
CPU=armv7-a
# Output (install) path
PREFIX=./android/$CPU
# Configure + build FFmpeg once.
# Reads CPU / PREFIX / CFLAG / ADD, which the caller sets before invoking.
function buildFF
{
echo "開始編譯ffmpeg"
./configure \
--prefix=$PREFIX \
--target-os=android \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--arch=arm \
--cpu=$CPU \
--sysroot=$PLATFORM \
--extra-cflags="$CFLAG" \
--cc=$TOOLCHAIN/bin/arm-linux-androideabi-gcc \
--nm=$TOOLCHAIN/bin/arm-linux-androideabi-nm \
--enable-shared \
--enable-runtime-cpudetect \
--enable-gpl \
--enable-small \
--enable-cross-compile \
--disable-debug \
--disable-static \
--disable-doc \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--disable-postproc \
--disable-avdevice \
--disable-symver \
--disable-stripping \
$ADD
make -j16
make install
echo "編譯結束!"
}
###########################################################
# Build 1: NEON + MediaCodec hardware H.264 decoding enabled.
echo "編譯支援neon和硬解碼"
CPU=armv7-a
PREFIX=./android/armv7-a-neon-hard
CFLAG="-I$PLATFORM/usr/include -fPIC -DANDROID -mfpu=neon -mfloat-abi=softfp "
# $ADD is expanded unquoted inside buildFF on purpose, so each flag word-splits
# into its own configure argument.
ADD="--enable-asm \
--enable-neon \
--enable-jni \
--enable-mediacodec \
--enable-decoder=h264_mediacodec \
--enable-hwaccel=h264_mediacodec "
buildFF
###########################################################
# Build 2: plain VFP build, no NEON and no hardware decoding.
echo "編譯不支援neon和硬解碼"
CPU=armv7-a
PREFIX=./android/$CPU
CFLAG="-I$PLATFORM/usr/include -fPIC -DANDROID -mfpu=vfp -mfloat-abi=softfp "
ADD=
buildFF
複製程式碼
1、修改configure檔案,替換以下幾個欄位的值:
#SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
#LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
#SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
#SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'
複製程式碼
2、執行shell指令碼,開始進行編譯。建議最好不要下載最新的版本,因為最新的版本在NDK交叉編譯時,會報各種檔案找
不到的錯誤。
注意:進入到ffmpeg(cd
ffmpeg-3.3.6/) 所在的目錄下面,執行 ./build_android.sh。如果無法執行指令碼,可能是沒有許可權,可以使用下面命令
chmod 755 ./build_android.sh
第四步:匯入專案中,使用Android Studio 並執行專案
1、建立一個工程,使用Android Studio,在建立時記得勾選上 Include C++ support 選項
2、配置gradle檔案,如下:externalNativeBuild {
cmake {
cppFlags "-std=c++11 -frtti -fexceptions"
arguments "-DANDROID_STL=c++_shared"
}
}
複製程式碼
ndk {
abiFilters 'armeabi-v7a'
}
複製程式碼
sourceSets.main {
jniLibs.srcDirs = ['src/main/jniLibs']
}
複製程式碼
externalNativeBuild {
cmake {
path "CMakeLists.txt"
}
}
複製程式碼
3、編輯 CMakeLists.txt 檔案:
# Minimum CMake version required by this build.
cmake_minimum_required(VERSION 3.4.1)
# Echo full compiler/linker command lines during the build (aids debugging).
# The original file set this twice; the duplicate near the bottom was removed.
set(CMAKE_VERBOSE_MAKEFILE on)
message("--I print it:----------->>>>>>>'${CMAKE_CXX_FLAGS}'")
project( libFFmpeg )
message("Checking CMAKE_SYSTEM_NAME = '${CMAKE_SYSTEM_NAME}'")
# Define an OS_* macro matching the target platform.
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
add_definitions(-DOS_OSX)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
add_definitions(-DOS_LINUX)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
add_definitions(-DOS_WIN)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Android")
add_definitions(-DOS_ANDROID)
message("Checking CMAKE_ABI_NAME = '${CMAKE_ANDROID_ARCH_ABI}'")
else()
message("OS not detected.")
endif()
### The JNI glue library built from our own sources.
add_library( MyFFmpeg
SHARED
src/main/cpp/native-lib.cpp )
find_library( log-lib
log )
find_library( android-lib
android )
# Directory holding the prebuilt ffmpeg .so files for the current ABI.
set(distribution_DIR ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI})
# Declare one prebuilt ffmpeg shared library (lib<name>.so) as an IMPORTED
# target. Replaces six copy-pasted add_library/set_target_properties stanzas.
function(add_ffmpeg_lib name)
add_library( ${name}
SHARED
IMPORTED )
set_target_properties( ${name}
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/lib${name}.so)
endfunction()
add_ffmpeg_lib(avutil-55)
add_ffmpeg_lib(swresample-2)
add_ffmpeg_lib(avfilter-6)
add_ffmpeg_lib(avformat-57)
add_ffmpeg_lib(swscale-4)
add_ffmpeg_lib(avcodec-57)
# Compile our own sources as gnu++11.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
include_directories(src/main/cpp)
# ffmpeg header path
include_directories(src/main/jniLibs/include)
target_link_libraries(MyFFmpeg
avcodec-57
avfilter-6
avformat-57
avutil-55
swresample-2
swscale-4
${log-lib}
${android-lib})
複製程式碼
4、載入自己編譯的so庫,不要加lib和.so等前後綴:
5、編寫cpp檔案:#include "FFmpegHeader.h"
using namespace std;
/*
* Class: com_function_ianchang_libffmpeg_FFmpegUtils
* Method: testFFmpeg
* Signature: (Ljava/lang/String;)Ljava/lang/String;
*/
extern "C"
JNIEXPORT jstring JNICALL
Java_com_function_ianchang_libffmpeg_FFmpegUtils_testFFmpeg
        (JNIEnv *env, jclass, jstring) {
    // Smoke-test entry point: proves the native library loads and the JNI
    // binding resolves. Returns a fixed greeting to the Java caller.
    const std::string greeting = "Hello from C++";
    return env->NewStringUTF(greeting.c_str());
}
extern "C"
JNIEXPORT void JNICALL
METHOD_NAME(paly)(JNIEnv *env, jclass, jstring path, jobject view){
    // Decode the video at `path` and blit each frame (converted to RGBA) into
    // the Android Surface wrapped by `view`.
    // NOTE(review): the exported name "paly" is a typo for "play", but it must
    // match the Java-side native declaration, so it is kept.
    const char* file = env->GetStringUTFChars(path, 0);
    if(file == NULL){
        // BUG FIX: the original only logged here and fell through, then passed
        // NULL to avformat_open_input(); bail out instead.
        LOGD("The file is a null object.");
        return;
    }
    LOGD("file=%s", file);
    // Register all muxers/demuxers/codecs (required before FFmpeg 4.0).
    av_register_all();
    int ret;
    // Demuxer (container) context.
    AVFormatContext *fmt_ctx = avformat_alloc_context();
    // Open the input and read the header; codecs are not opened yet.
    if(avformat_open_input(&fmt_ctx, file, NULL, NULL) < 0){
        env->ReleaseStringUTFChars(path, file);  // BUG FIX: leaked on this path
        return;
    }
    // Probe stream information.
    if(avformat_find_stream_info(fmt_ctx, NULL) < 0){
        avformat_close_input(&fmt_ctx);
        env->ReleaseStringUTFChars(path, file);
        return;
    }
    // Locate the first video stream.
    int video_stream_index = -1;
    for (unsigned int i = 0; i < fmt_ctx->nb_streams; i++) {  // nb_streams is unsigned
        if(fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_index = i;
            LOGE("找到視訊流索引位置video_stream_index=%d",video_stream_index);
            break;
        }
    }
    if (video_stream_index == -1){
        // BUG FIX: the original only logged and then used streams[-1] below.
        LOGE("未找到視訊流索引");
        avformat_close_input(&fmt_ctx);
        env->ReleaseStringUTFChars(path, file);
        return;
    }
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env,view);
    if (nativeWindow == NULL) {
        LOGE("ANativeWindow_fromSurface error");
        avformat_close_input(&fmt_ctx);
        env->ReleaseStringUTFChars(path, file);
        return;
    }
    // Target description filled in while a window frame is locked for drawing.
    ANativeWindow_Buffer outBuffer;
    // Build a decoder context from the stream's codec parameters.
    AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
    avcodec_parameters_to_context(codec_ctx, fmt_ctx->streams[video_stream_index]->codecpar);
    AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
    // Open the decoder.
    if((ret = avcodec_open2(codec_ctx,avCodec,NULL)) < 0){
        avcodec_close(codec_ctx);  // NOTE(review): the context struct itself
                                   // still leaks; avcodec_free_context() is the
                                   // complete fix, but this file uses the
                                   // close-only API throughout.
        ANativeWindow_release(nativeWindow);
        avformat_close_input(&fmt_ctx);
        env->ReleaseStringUTFChars(path, file);
        return;
    }
    // One reusable packet for the compressed frames read from the file.
    int y_size = codec_ctx->width * codec_ctx->height;
    AVPacket *pkt = (AVPacket *)malloc(sizeof(AVPacket));
    av_new_packet(pkt,y_size);
    AVFrame *yuvFrame = av_frame_alloc();   // decoder output (native pix_fmt)
    AVFrame *rgbFrame = av_frame_alloc();   // view onto the window's RGBA pixels
    // Converter from the decoder's pixel format to RGBA at the same size.
    SwsContext *m_swsCtx = sws_getContext(codec_ctx->width, codec_ctx->height, codec_ctx->pix_fmt, codec_ctx->width,
                                          codec_ctx->height, AV_PIX_FMT_RGBA, SWS_BICUBIC, NULL, NULL, NULL);
    // Buffer geometry is loop-invariant, so configure it once up front
    // (the original re-set it for every decoded frame).
    ANativeWindow_setBuffersGeometry(nativeWindow, codec_ctx->width, codec_ctx->height,
                                     WINDOW_FORMAT_RGBA_8888);
    LOGE("開始解碼");
    int index = 0;
    while (1){
        if(av_read_frame(fmt_ctx,pkt) < 0){
            // End of file (or unrecoverable read error): stop decoding.
            break;
        }
        if(pkt->stream_index == video_stream_index) {
            // Feed the compressed packet to the decoder.
            ret = avcodec_send_packet(codec_ctx, pkt);
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                LOGE("avcodec_send_packet ret=%d", ret);
                av_packet_unref(pkt);
                continue;
            }
            // Pull one decoded frame back out.
            ret = avcodec_receive_frame(codec_ctx, yuvFrame);
            if (ret < 0 && ret != AVERROR_EOF) {
                LOGE("avcodec_receive_frame ret=%d", ret);
                av_packet_unref(pkt);
                continue;
            }
            ret = ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
            if (ret != 0) {
                LOGE("ANativeWindow_lock error");
                av_packet_unref(pkt);
                break;  // BUG FIX: the original returned here, leaking everything
            }
            // BUG FIX: the original ran sws_scale() BEFORE pointing rgbFrame at
            // the window buffer, so the first conversion wrote through
            // uninitialized data pointers. Point rgbFrame at the locked pixels
            // first, then convert into them.
            av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize,
                                 (const uint8_t *) outBuffer.bits, AV_PIX_FMT_RGBA,
                                 codec_ctx->width, codec_ctx->height, 1);
            // The window may pad each row; honour its stride (pixels -> bytes).
            rgbFrame->linesize[0] = outBuffer.stride * 4;
            sws_scale(m_swsCtx, (const uint8_t *const *) yuvFrame->data, yuvFrame->linesize, 0,
                      codec_ctx->height, rgbFrame->data, rgbFrame->linesize);
            // Push the filled buffer to the SurfaceView.
            ret = ANativeWindow_unlockAndPost(nativeWindow);
            if (ret != 0) {
                LOGE("ANativeWindow_unlockAndPost error");
                av_packet_unref(pkt);
                break;  // BUG FIX: was an early return with no cleanup
            }
            LOGE("成功顯示到緩衝區%d次",++index);
        }
        av_packet_unref(pkt);
        // Crude ~16ms pacing; a real player would honour the stream timestamps.
        usleep(1000*16);
    }
    av_frame_free(&rgbFrame);
    av_frame_free(&yuvFrame);  // BUG FIX: was leaked
    free(pkt);                 // BUG FIX: was leaked (matches the malloc above)
    avcodec_close(codec_ctx);  // NOTE(review): context struct still leaks; see above
    sws_freeContext(m_swsCtx);
    avformat_close_input(&fmt_ctx);
    ANativeWindow_release(nativeWindow);
    env->ReleaseStringUTFChars(path, file);
    LOGI("解析完成");
}
複製程式碼
6、呼叫方式,如下:
// Resolves the demo video file and hands its path, together with the
// SurfaceView's Surface, to the native decoder.
// NOTE(review): "paly" is a typo for "play", but it must match the JNI
// function name exported on the C++ side (METHOD_NAME(paly)).
public void playVideo(View view){
    File resFile = new File(resVideo);
    // Log the resolved path and whether the file actually exists on disk.
    Log.d("TAG", "playVideo->resFile = "+resFile.getPath());
    Log.d("TAG", "playVideo->resFile = "+resFile.exists());
    ffmpeg.paly(resVideo, surfaceView.getHolder().getSurface());
}
複製程式碼