#ifndef HXVIDEODECODER_H
#define HXVIDEODECODER_H

#include <QDebug>
#include <QString>
#include "HxUtils.h"
#include <opencv2/opencv.hpp>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libavutil/imgutils.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
}

#if USE_ALGORITHM
#include "rk_mpi.h"
#include "mpp_mem.h"
#include "mpp_time.h"
#include "mpp_env.h"
#include "mpp_common.h"
#include "mpp_frame.h"
#include "mpp_buffer_impl.h"
#include "RgaUtils.h"
#include "im2d.hpp"
#endif

class HxVideoDecoder
{
public:
    HxVideoDecoder(void) {}

#ifndef USE_ALGORITHM
    bool initialization(AVFormatContext* ifmt_ctx)
    {
        auto video_stream_index = -1;
        for (uint i = 0; i < ifmt_ctx->nb_streams; i++)
        {
            if (ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
                video_stream_index = static_cast<int>(i);
        }
        if (video_stream_index == -1)
            return false;

        /* Set up the decoder context */
        video_dec_ctx = avcodec_alloc_context3(nullptr);
        if (video_dec_ctx == nullptr)
        {
            printf("Could not allocate AVCodecContext\n");
            return false;
        }

        /* Copy the stream's codec parameters into the context */
        avcodec_parameters_to_context(video_dec_ctx, ifmt_ctx->streams[video_stream_index]->codecpar);

        /* Find the decoder */
        auto codec = avcodec_find_decoder(video_dec_ctx->codec_id);
        if (!codec)
            return false;

        /* Open the decoder */
        if (avcodec_open2(video_dec_ctx, codec, nullptr) < 0)
        {
            codec = nullptr;
            avcodec_free_context(&video_dec_ctx);
            return false;
        }

        video_dec_frame = av_frame_alloc();
        video_picture_frame = av_frame_alloc();

        /* Compute the buffer size needed for the given pixel format, width and height */
        auto numBytes = av_image_get_buffer_size(AV_PIX_FMT_BGR24, video_dec_ctx->width, video_dec_ctx->height, 1);

        /* Allocate video_out_buffer */
        video_out_buffer = static_cast<uint8_t*>(av_malloc(static_cast<size_t>(numBytes) * sizeof(uint8_t)));

        /* Bind video_out_buffer to the picture frame's data planes */
        av_image_fill_arrays(video_picture_frame->data, video_picture_frame->linesize, video_out_buffer,
                             AV_PIX_FMT_BGR24, video_dec_ctx->width, video_dec_ctx->height, 1);

        auto pix_fmt = video_dec_ctx->pix_fmt;
        if (video_dec_ctx->pix_fmt == AVPixelFormat::AV_PIX_FMT_NONE)
            pix_fmt = AV_PIX_FMT_YUV420P;

        /* Convert decoded YUV frames to BGR24 */
        video_sws_context = sws_getContext(
            video_dec_ctx->width,  /* input width */
            video_dec_ctx->height, /* input height */
            pix_fmt,               /* input pixel format */
            video_dec_ctx->width,  /* output width */
            video_dec_ctx->height, /* output height */
            AV_PIX_FMT_BGR24,      /* output pixel format */
            SWS_BICUBIC,           /* scaling algorithm (only used when input and output sizes differ); SWS_FAST_BILINEAR is the usual choice */
            nullptr,               /* input filter, NULL if not needed */
            nullptr,               /* output filter, NULL if not needed */
            nullptr                /* extra parameters for the scaler, NULL by default */
        );

        return true;
    }
#else
    bool initialization(void)
    {
        MPP_RET ret = MPP_OK;
        RK_U32 need_split = 1;

        /* Create the MPP context and the MPP API interface */
        ret = mpp_create(&mpp_ctx, &mpp_mpi);
        if (ret != MPP_OK)
        {
            release();
            HxLog::append("mpp", QString("mpp_create failed"));
            return false;
        }

        /* Configure the decoder: split the input bitstream into frames */
        ret = mpp_mpi->control(mpp_ctx, MPP_DEC_SET_PARSER_SPLIT_MODE, (MppParam*)&need_split);
        if (MPP_OK != ret)
        {
            release();
            HxLog::append("mpp", QString("mpi->control failed"));
            return false;
        }

        /* Configure the decoder: blocking input */
        ret = mpp_mpi->control(mpp_ctx, MPP_SET_INPUT_BLOCK, (MppParam*)&need_split);
        if (MPP_OK != ret)
        {
            release();
            HxLog::append("mpp", QString("mpi->control failed"));
            return false;
        }

        /* Initialize MPP (hard-coded to H.264) */
        ret = mpp_init(mpp_ctx, MPP_CTX_DEC, MPP_VIDEO_CodingAVC);
        if (MPP_OK != ret)
        {
            release();
            HxLog::append("mpp", QString("mpp_init failed"));
            return false;
        }

        return true;
    }
#endif
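    // A minimal usage sketch for the MPP build (USE_ALGORITHM defined). It
    // assumes the caller still demuxes H.264 packets with libavformat; the
    // ifmt_ctx and packet variables below are placeholders, not part of this
    // header:
    //
    //   HxVideoDecoder decoder;
    //   decoder.initialization();            // MPP H.264 decoder
    //   cv::Mat frame;
    //   AVPacket packet;
    //   while (av_read_frame(ifmt_ctx, &packet) >= 0)
    //   {
    //       decoder.decode(&packet, &frame);
    //       av_packet_unref(&packet);
    //   }
    //   decoder.release();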
    void release(void)
    {
#ifndef USE_ALGORITHM
        if (video_dec_ctx)
        {
            /* avcodec_free_context() also closes the codec, so no separate avcodec_close() is needed */
            avcodec_free_context(&video_dec_ctx);
            av_frame_free(&video_dec_frame);
            av_frame_free(&video_picture_frame);
            sws_freeContext(video_sws_context);
            if (video_out_buffer)
                av_free(video_out_buffer);
            video_out_buffer = nullptr;
        }
#else
        if (mpp_ctx)
        {
            mpp_mpi->reset(mpp_ctx);
            mpp_destroy(mpp_ctx);
            mpp_ctx = nullptr;
        }
        if (mpp_frame_group)
        {
            mpp_buffer_group_put(mpp_frame_group);
            mpp_frame_group = nullptr;
        }
#endif
    }

    void decode(AVPacket* packet, cv::Mat* mat)
    {
#ifndef USE_ALGORITHM
        /* Send the packet to FFmpeg's decode queue */
        if (avcodec_send_packet(video_dec_ctx, packet) == 0)
        {
            /* Fetch one decoded frame from the queue */
            if (avcodec_receive_frame(video_dec_ctx, video_dec_frame) == 0)
            {
                /* Convert the frame to BGR24 */
                sws_scale(video_sws_context, static_cast<const uint8_t* const*>(video_dec_frame->data),
                          video_dec_frame->linesize, 0, video_dec_ctx->height,
                          video_picture_frame->data, video_picture_frame->linesize);

                /* Wrap the conversion buffer in a cv::Mat without copying */
                (*mat) = cv::Mat(cv::Size(video_dec_ctx->width, video_dec_ctx->height), CV_8UC3, video_out_buffer);
            }
        }
#else
        RK_U32 pkt_done = 0;
        RK_U32 pkt_eos = 0;
        RK_U32 err_info = 0;
        MPP_RET ret = MPP_OK;
        MppPacket mpp_packet = nullptr;
        MppFrame mpp_frame = nullptr;

        ret = mpp_packet_init(&mpp_packet, packet->data, packet->size);
        if (ret < 0)
            return;

        mpp_packet_set_pts(mpp_packet, packet->pts);

        do
        {
            RK_S32 count = 5;

            /* Send the packet first if it has not been consumed yet */
            if (!pkt_done)
            {
                ret = mpp_mpi->decode_put_packet(mpp_ctx, mpp_packet);
                if (MPP_OK == ret)
                    pkt_done = 1;
            }

            /* Then fetch all available frames and release them */
            do
            {
                RK_S32 get_frm = 0;
                RK_U32 frame_eos = 0;

            try_again:
                ret = mpp_mpi->decode_get_frame(mpp_ctx, &mpp_frame);
                if (MPP_ERR_TIMEOUT == ret)
                {
                    if (count > 0)
                    {
                        count--;
                        msleep(2);
                        goto try_again;
                    }
                    qDebug("decode_get_frame timed out too many times");
                }

                if (MPP_OK != ret)
                {
                    qDebug("decode_get_frame failed ret %d", ret);
                    break;
                }

                if (mpp_frame)
                {
                    if (mpp_frame_get_info_change(mpp_frame))
                    {
                        RK_U32 width = mpp_frame_get_width(mpp_frame);
                        RK_U32 height = mpp_frame_get_height(mpp_frame);
                        RK_U32 hor_stride = mpp_frame_get_hor_stride(mpp_frame);
                        RK_U32 ver_stride = mpp_frame_get_ver_stride(mpp_frame);
                        RK_U32 buf_size = mpp_frame_get_buf_size(mpp_frame);

                        qDebug("decode_get_frame got an info-change frame");
                        qDebug("decoder requires buffer w:h [%d:%d] stride [%d:%d] buf_size %d",
                               width, height, hor_stride, ver_stride, buf_size);

                        ret = mpp_buffer_group_get_internal(&mpp_frame_group, MPP_BUFFER_TYPE_ION);
                        if (ret)
                        {
                            qDebug("get mpp buffer group failed ret %d", ret);
                            break;
                        }

                        mpp_mpi->control(mpp_ctx, MPP_DEC_SET_EXT_BUF_GROUP, mpp_frame_group);
                        mpp_mpi->control(mpp_ctx, MPP_DEC_SET_INFO_CHANGE_READY, nullptr);
                    }
                    else
                    {
                        err_info = mpp_frame_get_errinfo(mpp_frame) | mpp_frame_get_discard(mpp_frame);
                        if (err_info)
                        {
                            qDebug("decode_get_frame got err info:%d discard:%d.",
                                   mpp_frame_get_errinfo(mpp_frame), mpp_frame_get_discard(mpp_frame));
                        }

                        frame_count++;

                        if (!err_info)
                        {
                            RK_U32 width = mpp_frame_get_width(mpp_frame);
                            RK_U32 height = mpp_frame_get_height(mpp_frame);
                            MppBuffer mpp_buffer = mpp_frame_get_buffer(mpp_frame);

                            /* NV12 -> BGR conversion via RGA */
                            convert_to_mat(mpp_buffer, width, height, mat);

                            /* CPU fallback kept for reference: copy the NV12 planes row by row
                             * (respecting the strides) into a contiguous buffer and convert with OpenCV:
                             *
                             *   RK_U32 h_stride = mpp_frame_get_hor_stride(mpp_frame);
                             *   RK_U32 v_stride = mpp_frame_get_ver_stride(mpp_frame);
                             *   cv::Mat yuv_img(height * 3 / 2, width, CV_8UC1);
                             *   RK_U8* base = (RK_U8*)mpp_buffer_get_ptr(mpp_buffer);
                             *   RK_U8* base_c = base + h_stride * v_stride;
                             *   int idx = 0;
                             *   for (int i = 0; i < height; i++, base += h_stride, idx += width)
                             *       memcpy(yuv_img.data + idx, base, width);
                             *   for (int i = 0; i < height / 2; i++, base_c += h_stride, idx += width)
                             *       memcpy(yuv_img.data + idx, base_c, width);
                             *   cv::cvtColor(yuv_img, (*mat), CV_YUV420sp2RGB);
                             */
                        }
                    }

                    frame_eos = mpp_frame_get_eos(mpp_frame);
                    mpp_frame_deinit(&mpp_frame);
                    mpp_frame = nullptr;
                    get_frm = 1;
                }

                /* Sample the runtime frame buffer usage */
                if (mpp_frame_group)
                {
                    size_t usage = mpp_buffer_group_usage(mpp_frame_group);
                    if (usage > max_usage)
                        max_usage = usage;
                }

                /* The last packet was sent but the last frame has not arrived yet; keep polling */
                if (pkt_eos && pkt_done && !frame_eos)
                {
                    msleep(10);
                    continue;
                }

                if (frame_eos)
                {
                    qDebug("found last frame");
                    break;
                }

                if (frame_num > 0 && frame_count >= frame_num)
                {
                    eos = 1;
                    break;
                }

                if (get_frm)
                    continue;
                break;
            } while (1);

            if (frame_num > 0 && frame_count >= frame_num)
            {
                eos = 1;
                qDebug("reached max frame number %d", frame_count);
                break;
            }

            if (pkt_done)
                break;

            /*
             * Why sleep here:
             * mpi->decode_put_packet fails while the internal packet queue is full,
             * so wait until a packet has been consumed. Hardware decoding of one
             * 1080p frame usually takes about 2 ms, so sleeping 3 ms is enough.
             */
            msleep(3);
        } while (1);

        mpp_packet_deinit(&mpp_packet);
#endif
    }
private:
#if USE_ALGORITHM
    size_t mpp_buffer_group_usage(MppBufferGroup group)
    {
        if (group == nullptr)
        {
            qDebug("input invalid group %p", group);
            return MPP_BUFFER_MODE_BUTT;
        }

        return ((MppBufferGroupImpl*)group)->usage;
    }

    int write_image_to_file(void* buf, const char* filename, int sw, int sh, int fmt, int index)
    {
        (void)index; /* unused */

        FILE* file = fopen(filename, "wb+");
        if (!file)
        {
            fprintf(stderr, "Could not open %s\n", filename);
            return -1;
        }
        fprintf(stderr, "open %s and write ok\n", filename);

        int size = sw * sh * get_bpp_from_format(fmt);
        fwrite(buf, size, 1, file);
        fclose(file);
        return 0;
    }

    /* Convert an NV12 MppBuffer to a BGR cv::Mat with RGA; returns true on success */
    bool convert_to_mat(MppBuffer mpp_buffer, RK_U32 width, RK_U32 height, cv::Mat* mat)
    {
        im_rect src_rect, dst_rect;
        rga_buffer_t src_img, dst_img;

        mat->create(cv::Size(width, height), CV_8UC3);

        memset(&src_rect, 0, sizeof(src_rect));
        memset(&dst_rect, 0, sizeof(dst_rect));
        memset(&src_img, 0, sizeof(src_img));
        memset(&dst_img, 0, sizeof(dst_img));

        src_img = wrapbuffer_virtualaddr(mpp_buffer_get_ptr(mpp_buffer), width, height, RK_FORMAT_YCbCr_420_SP);
        dst_img = wrapbuffer_virtualaddr(mat->data, width, height, RK_FORMAT_BGR_888);
        if (src_img.width == 0 || dst_img.width == 0)
        {
            printf("%s, %s\n", __FUNCTION__, imStrError());
            return false;
        }

        src_img.format = RK_FORMAT_YCbCr_420_SP;
        dst_img.format = RK_FORMAT_BGR_888;

        auto ret = imcheck(src_img, dst_img, src_rect, dst_rect);
        if (IM_STATUS_NOERROR != ret)
        {
            printf("%d, check error! %s", __LINE__, imStrError((IM_STATUS)ret));
            return false;
        }

        auto status = imcvtcolor(src_img, dst_img, src_img.format, dst_img.format);
        return status == IM_STATUS_SUCCESS;
    }
#endif

private:
#if USE_ALGORITHM
    MppCtx mpp_ctx = nullptr;
    MppApi* mpp_mpi = nullptr;
    size_t max_usage = 0;
    RK_U32 frame_count = 0, frame_num = 0, eos = 0;
    /* frame buffer group manager */
    MppBufferGroup mpp_frame_group = nullptr;
#else
    uint8_t* video_out_buffer = nullptr;
    struct SwsContext* video_sws_context = nullptr;
    AVCodecContext* video_dec_ctx = nullptr;
    AVFrame* video_dec_frame = nullptr, * video_picture_frame = nullptr;
#endif
};

#endif // HXVIDEODECODER_H
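/*
 * A minimal end-to-end sketch of how this class is typically driven in the
 * FFmpeg build (USE_ALGORITHM not defined). The input path, the preview
 * window and the omitted error handling are illustrative assumptions, not
 * part of this header; a real caller would also check packet.stream_index
 * against the video stream before calling decode().
 *
 *   AVFormatContext* ifmt_ctx = nullptr;
 *   avformat_open_input(&ifmt_ctx, "sample.mp4", nullptr, nullptr);
 *   avformat_find_stream_info(ifmt_ctx, nullptr);
 *
 *   HxVideoDecoder decoder;
 *   decoder.initialization(ifmt_ctx);
 *
 *   AVPacket packet;
 *   cv::Mat frame;
 *   while (av_read_frame(ifmt_ctx, &packet) >= 0)
 *   {
 *       decoder.decode(&packet, &frame);
 *       if (!frame.empty())
 *       {
 *           cv::imshow("preview", frame);
 *           cv::waitKey(1);
 *       }
 *       av_packet_unref(&packet);
 *   }
 *
 *   decoder.release();
 *   avformat_close_input(&ifmt_ctx);
 */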