// Record the result of each FFmpeg call
int result;
// R1 Convert the Java String to a C string
const char *path = env->GetStringUTFChars(path_, 0);
// Register FFmpeg components
av_register_all();
// R2 Initialize the AVFormatContext
AVFormatContext *format_context = avformat_alloc_context();
// Open the video file
result = avformat_open_input(&format_context, path, NULL, NULL);
if (result < 0) {
    LOGE("Player Error : Can not open video file");
    return;
}
// Find the stream information of the video file
result = avformat_find_stream_info(format_context, NULL);
if (result < 0) {
    LOGE("Player Error : Can not find video file stream info");
    return;
}
// Find the video stream index
int video_stream_index = -1;
for (int i = 0; i < format_context->nb_streams; i++) {
    // Match the video stream
    if (format_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        video_stream_index = i;
    }
}
// No video stream found
if (video_stream_index == -1) {
    LOGE("Player Error : Can not find video stream");
    return;
}
// Initialize the video decoder context
AVCodecContext *video_codec_context = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(video_codec_context, format_context->streams[video_stream_index]->codecpar);
// Find the video decoder
AVCodec *video_codec = avcodec_find_decoder(video_codec_context->codec_id);
if (video_codec == NULL) {
    LOGE("Player Error : Can not find video codec");
    return;
}
// R3 Open the video decoder
result = avcodec_open2(video_codec_context, video_codec, NULL);
if (result < 0) {
    LOGE("Player Error : Can not open video codec");
    return;
}
// Get the width and height of the video
int videoWidth = video_codec_context->width;
int videoHeight = video_codec_context->height;
// R4 Initialize a NativeWindow for playing the video
ANativeWindow *native_window = ANativeWindow_fromSurface(env, surface); // surface corresponds to the Surface object on the Java side
if (native_window == NULL) {
    LOGE("Player Error : Can not create native window");
    return;
}
// The width and height limit the number of pixels in the buffer, not the physical display size of the screen.
// If the buffer size does not match the physical display size, the displayed image may be stretched or compressed.
result = ANativeWindow_setBuffersGeometry(native_window, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
if (result < 0) {
    LOGE("Player Error : Can not set native window buffer");
    ANativeWindow_release(native_window);
    return;
}
// Define the drawing buffer
ANativeWindow_Buffer window_buffer;
// Declare three data containers
// R5 Packet: holds the encoded data before decoding
AVPacket *packet = av_packet_alloc();
av_init_packet(packet);
// R6 Frame: holds the decoded pixel data; it cannot be rendered directly and needs conversion
AVFrame *frame = av_frame_alloc();
// R7 Frame: holds the converted data, which can be used for rendering
AVFrame *rgba_frame = av_frame_alloc();
// Prepare for the data format conversion
// Output buffer size
int buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, videoWidth, videoHeight, 1);
// R8 Allocate the buffer memory
uint8_t *out_buffer = (uint8_t *) av_malloc(buffer_size * sizeof(uint8_t));
LOGI("outBuffer size: %d, videoWidth: %d, videoHeight: %d, pix_fmt: %d", buffer_size, videoWidth, videoHeight, video_codec_context->pix_fmt);
av_image_fill_arrays(rgba_frame->data, rgba_frame->linesize, out_buffer, AV_PIX_FMT_RGBA, videoWidth, videoHeight, 1);
// R9 Data format conversion context
struct SwsContext *data_convert_context = sws_getContext(
        videoWidth, videoHeight, video_codec_context->pix_fmt,
        videoWidth, videoHeight, AV_PIX_FMT_RGBA,
        SWS_BICUBIC, NULL, NULL, NULL);
// Start reading frames
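The snippet above uses env, path_, surface, LOGE and LOGI without showing where they come from: it is the body of a JNI entry point. Below is a minimal sketch of the surrounding function and the headers it needs, assuming a hypothetical Java class com.example.player.VideoPlayer with a native method playVideo(String path, Surface surface); the package, class, and method names, as well as the log tag, are illustrative and should be replaced with the ones used in your project.

// Sketch of the JNI wrapper assumed by the code above (names are hypothetical).
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "Player", __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "Player", __VA_ARGS__)

extern "C"
JNIEXPORT void JNICALL
Java_com_example_player_VideoPlayer_playVideo(JNIEnv *env, jobject instance,
                                              jstring path_, jobject surface) {
    // ... the initialization code shown above, followed by the frame-reading loop ...
}

Note that ANativeWindow_fromSurface and the ANativeWindow APIs come from the NDK's android and nativewindow support, so the native library must be linked against the android and log libraries in addition to the FFmpeg libraries.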