NDK--利用FFmpeg进行安卓原生绘制播放

2020-07-03 10:25:37 浏览数 (3)

上次说到视频编码和转码,得到了yuv格式的文件。我们平常看到的视频画面,解码后实际都是yuv格式的数据。今天来实现在APP中播放视频,代码和上次差不多,只需把转码后的结果从写入文件改为直接输出到屏幕上。
1.自定义View继承SurfaceView,把SurfaceView传入native层进行绘制
代码语言:javascript复制
package com.aruba.ffmpegapplication;

import android.content.Context;
import android.graphics.PixelFormat;
import android.util.AttributeSet;
import android.view.Surface;
import android.view.SurfaceView;

import java.lang.ref.WeakReference;

/**
 * A {@link SurfaceView} that plays a video file by handing its {@link Surface}
 * to the native FFmpeg renderer. Decoding and drawing run on a background
 * {@link PlayThread}; the native render call blocks until EOF or {@link #stop()}.
 * <p>
 * Created by aruba on 2020/6/30.
 */
public class YuvPlayView extends SurfaceView {
    static {
        System.loadLibrary("native-lib");
    }

    private PlayThread playThread;

    public YuvPlayView(Context context) {
        this(context, null);
    }

    public YuvPlayView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public YuvPlayView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    private void init() {
        setKeepScreenOn(true);
        // 4 bytes per pixel; must match the RGBA frames the native renderer writes.
        getHolder().setFormat(PixelFormat.RGBA_8888);

        playThread = new PlayThread(this);
    }

    /**
     * Starts playback of the given file, stopping any playback in progress.
     *
     * @param filePath path of the video file to play
     */
    public void play(String filePath) {
        if (isPlaying()) {
            // Signals the native loop to exit; the old thread winds down on its own.
            playThread.stopPlay();
        }

        playThread = new PlayThread(this);
        playThread.filePath = filePath;
        playThread.start();
    }

    /** Stops playback if it is currently running. */
    public void stop() {
        if (isPlaying()) {
            playThread.stopPlay();
        }
    }

    /** @return {@code true} while the play thread is inside the native render loop */
    public boolean isPlaying() {
        return playThread.isPlaying();
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Stop the native loop when the view leaves the window so it does not
        // keep drawing into a dead Surface.
        stop();
    }

    /** Background thread that drives the blocking native render loop. */
    static class PlayThread extends Thread {
        // volatile: written on this thread, read from the UI thread via isPlaying().
        public volatile boolean isPlaying;
        public String filePath;
        // Weak reference so a still-running thread cannot leak the view/activity.
        private final WeakReference<YuvPlayView> playViewWeakReference;

        public PlayThread(YuvPlayView yuvPlayView) {
            this.playViewWeakReference = new WeakReference<>(yuvPlayView);
        }

        @Override
        public void run() {
            isPlaying = true;
            // Read the weak reference once so it cannot be cleared between the
            // null check and the use (the original called get() twice).
            YuvPlayView view = playViewWeakReference.get();
            if (view != null) {
                // Blocking call: decodes and draws frames until EOF or stopPlay().
                render(filePath, view.getHolder().getSurface());
            }
            isPlaying = false;
        }

        public boolean isPlaying() {
            return isPlaying;
        }

        /** Native blocking render loop; returns 0 on success, non-zero on error. */
        public native int render(String filePath, Surface surface);

        /** Asks the native render loop to exit; returns immediately. */
        public native void stopPlay();
    }

}
2.编写相应的native方法,利用FFmpeg解码并通过ANativeWindow绘制到Surface上
代码语言:javascript复制
#include <jni.h>
#include <atomic>
#include <string>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <unistd.h>

extern "C" {
//编码
#include "libavcodec/avcodec.h"
//封装格式处理
#include "libavformat/avformat.h"
//像素处理
#include "libswscale/swscale.h"
}

#define  LOG_TAG    "aruba"
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

// Playback flag shared between the UI-triggered stopPlay() and the render-loop
// thread; atomic so the store on one thread is guaranteed visible to the other
// (a plain bool gives no cross-thread visibility guarantee).
std::atomic<bool> is_playing(false);

extern "C"
JNIEXPORT jint JNICALL
Java_com_aruba_ffmpegapplication_YuvPlayView_00024PlayThread_render(JNIEnv *env, jobject instance,
                                                                    jstring filePath_,
                                                                    jobject surface) {
    const char *filePath = env->GetStringUTFChars(filePath_, 0);

    // Register all FFmpeg components (required before demux/decode on FFmpeg 2.x).
    av_register_all();

    // Open the input file and read its header.
    AVFormatContext *formatContext = avformat_alloc_context();
    if (avformat_open_input(&formatContext, filePath, NULL, NULL) != 0) {
        LOGE("打开失败");
        // On failure avformat_open_input frees the context and NULLs the pointer,
        // so freeing it here again would be wrong.
        env->ReleaseStringUTFChars(filePath_, filePath);
        return 1;
    }

    // Fill stream information into the AVFormatContext.
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        LOGE("获取文件信息失败");
        avformat_close_input(&formatContext);
        env->ReleaseStringUTFChars(filePath_, filePath);
        return 2;
    }

    // Locate the first video stream and grab its decoder context.
    AVCodecContext *codecContext = NULL;
    int video_stream_idx = -1;
    for (int i = 0; i < formatContext->nb_streams; ++i) {  // fixed: increment was missing
        if (formatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            codecContext = formatContext->streams[i]->codec;
            video_stream_idx = i;
            break;
        }
    }

    if (codecContext == NULL) {
        LOGE("获取编解码器上下文失败");
        avformat_close_input(&formatContext);
        env->ReleaseStringUTFChars(filePath_, filePath);
        return 3;
    }

    // Find the decoder matching the stream's codec id and open it.
    AVCodec *codec = avcodec_find_decoder(codecContext->codec_id);
    if (avcodec_open2(codecContext, codec, NULL) < 0) {
        LOGE("打开解码失败");
        avformat_close_input(&formatContext);
        env->ReleaseStringUTFChars(filePath_, filePath);
        return 4;
    }

    // Start reading frames; stopPlay() flips this flag to break the loop.
    is_playing = true;

    // Holds the compressed (demuxed) data.
    AVPacket *pkt = (AVPacket *) (av_malloc(sizeof(AVPacket)));
    av_init_packet(pkt);

    // Holds the decoded (raw YUV) frame.
    AVFrame *picture = av_frame_alloc();

    // Holds the RGBA-converted frame; backed by a buffer we must free ourselves.
    AVFrame *picture_rgb = av_frame_alloc();
    uint8_t *rgb_buffer = (uint8_t *) (av_malloc(
            (size_t) avpicture_get_size(AV_PIX_FMT_RGBA, codecContext->width,
                                        codecContext->height)));
    avpicture_fill((AVPicture *) picture_rgb, rgb_buffer, AV_PIX_FMT_RGBA, codecContext->width,
                   codecContext->height);

    int got_picture = 0;

    // Native drawing target backing the Java Surface.
    ANativeWindow *aNativeWindow = ANativeWindow_fromSurface(env, surface);
    // Configure the window buffers: video dimensions, RGBA8888 pixels.
    // WINDOW_FORMAT_RGBA_8888 is the ANativeWindow constant for this API
    // (the AHardwareBuffer enum used before happens to share the value).
    ANativeWindow_setBuffersGeometry(aNativeWindow, codecContext->width, codecContext->height,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer out_buff;

    // Scaler/converter context: source w/h/pix_fmt -> same-size RGBA, no filters.
    SwsContext *swsContext = sws_getContext(codecContext->width, codecContext->height,
                                            codecContext->pix_fmt,
                                            codecContext->width, codecContext->height,
                                            AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL
    );

    while (av_read_frame(formatContext, pkt) == 0 && is_playing) {
        if (pkt->stream_index == video_stream_idx) {
            // Decode one packet; got_picture > 0 once a full frame is available.
            avcodec_decode_video2(codecContext, picture, &got_picture, pkt);

            LOGE("picture_ptr %d", got_picture);
            if (got_picture > 0) {
                // Convert YUV -> RGBA starting at row 0 for the full frame height.
                sws_scale(swsContext, picture->data, picture->linesize, 0,
                          codecContext->height,
                          picture_rgb->data, picture_rgb->linesize);

                // Lock the window buffer; skip the frame if the Surface is gone.
                if (ANativeWindow_lock(aNativeWindow, &out_buff, NULL) == 0) {
                    // Copy the RGBA frame row by row: the window's stride (in
                    // pixels, 4 bytes each) can be wider than the frame's linesize.
                    uint8_t *src = picture_rgb->data[0];
                    uint8_t *dst = (uint8_t *) (out_buff.bits);
                    int destStride = out_buff.stride * 4;
                    for (int i = 0; i < codecContext->height; i++) {  // fixed: increment was missing
                        memcpy(dst, src, picture_rgb->linesize[0]);
                        dst += destStride;                 // fixed: += was missing
                        src += picture_rgb->linesize[0];   // fixed: += was missing
                    }

                    ANativeWindow_unlockAndPost(aNativeWindow);
                }
                // Naive pacing: ~16 ms per frame (~60 fps), ignores real timestamps.
                usleep(16 * 1000);
            }
        }

        av_free_packet(pkt);
    }

    // Release resources. av_free_packet is safe here even if the packet was
    // already freed; it covers the leak when the loop exits via is_playing.
    av_free_packet(pkt);
    av_free(pkt);
    sws_freeContext(swsContext);
    ANativeWindow_release(aNativeWindow);
    av_free(rgb_buffer);          // fixed: converted-frame buffer was leaked
    av_frame_free(&picture_rgb);
    av_frame_free(&picture);
    avcodec_close(codecContext);
    avformat_close_input(&formatContext);  // pairs with avformat_open_input and frees the context
    env->ReleaseStringUTFChars(filePath_, filePath);

    is_playing = false;
    return 0;
}

// Requests that the render loop stop: clears the shared flag that render()'s
// read loop checks each iteration. Returns immediately; the play thread exits
// and releases its resources on its own.
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_ffmpegapplication_YuvPlayView_00024PlayThread_stopPlay(JNIEnv *env,
                                                                      jobject instance) {
    is_playing = false;
}
通过ANativeWindow进行绘制时,要注意两个缓冲区之间复制数据时指针的位移量:每一帧的数据其实就是一个二维数组,按行逐行复制即可,目标行距取window缓冲区的stride(乘以每像素4字节),源行距取frame的linesize。最后别忘了在cmake中链接ANativeWindow所在的android库。
代码语言:javascript复制
# Link the JNI library against the prebuilt FFmpeg 2.x shared libraries, plus
# the NDK's "android" library (provides the ANativeWindow API) and liblog.
target_link_libraries(
        native-lib
        avcodec-56
        avdevice-56
        avfilter-5
        avformat-56
        avutil-54
        postproc-53
        swresample-1
        swscale-3

        android
        ${log-lib})
最后效果:

0 人点赞