1. Resource files
https://download.csdn.net/download/twicave/89579327
The link above provides the .h264, .h265, and original YUV420 files, along with their respective sizes.
2. Conversion tools
2.1 Converting between .h264 and .h265
You can use the ffmpeg tool: Builds - CODEX FFMPEG @ gyan.dev
Command line:
ffmpeg -i Tennis1080p.h264 -c:v libx265 -preset medium -crf 28 Tennis1080p.h265
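For the reverse direction (.h265 back to .h264) the same pattern applies; a minimal sketch, assuming your ffmpeg build includes libx264 (the CRF value here is just an example):
ffmpeg -i Tennis1080p.h265 -c:v libx264 -preset medium -crf 23 Tennis1080p.h264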
2.2 Converting .h264 to YUV
Because I needed to test a hardware decoder on the RK3588, I used a C program:
mpp-test: a simple example for Rockchip MPP
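Assuming the mpp-test source is saved as mpp-dec-h264-to-yuv-file.c (the file name the makefile in Appendix A.1 expects), building and running it on the board follows the same steps as the header comment in Appendix A:
make
./mpp-dec-h264-to-yuv-file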
2.3 YUV file reader
This can be handled directly with Python:
import cv2
import numpy as np

yuv_frame_pack_file = "Tennis1080p.yuv"

def yuv420sp_to_rgb(nv12, width, height):
    print('lenOfframe = ', len(nv12))
    nv12_array = np.frombuffer(nv12, dtype=np.uint8)
    #if len(nv12_array) > (width * height * 3 // 2):
    #    height += ((len(nv12_array) - (width * height * 3 // 2)) // width * 2) // 3
    y_plane_size = height * width
    uv_plane_size = (height // 2) * (width // 2) * 2
    # Split the NV12 data into the Y plane and the interleaved UV plane
    y_plane = nv12_array[0:y_plane_size].reshape(height, width)
    uv_plane = nv12_array[y_plane_size:y_plane_size + uv_plane_size].reshape(height // 2, width)  # UV plane is half the height of the Y plane
    # Build a (height * 3 / 2, width) array in the layout OpenCV expects for YUV420sp (NV12)
    yuv420sp = np.zeros((height + height // 2, width), dtype=np.uint8)
    yuv420sp[:height, :] = y_plane               # copy the Y plane
    yuv420sp[height:, ::2] = uv_plane[:, 1::2]   # even chroma positions (the two interleaved chroma bytes are swapped here)
    yuv420sp[height:, 1::2] = uv_plane[:, ::2]   # odd chroma positions
    '''
    # debug: dump the repacked buffer to a file and exit
    file_path = 'yuv420sp.bin'
    with open(file_path, 'wb') as file:
        file.write(nv12)
    sys.exit(0)
    '''
    # Use OpenCV to convert YUV420sp (NV12) to RGB
    rgb_image = cv2.cvtColor(yuv420sp, cv2.COLOR_YUV2RGB_NV12)
    return rgb_image

# image width and height
width, height = 1920, 1080

# read one frame of YUV420 data
cnt = 23
with open(yuv_frame_pack_file, 'rb') as file:
    # skip the first 23 frames
    while cnt > 0:
        cnt -= 1
        yuv420_frame = file.read(width * height * 3 // 2)  # YUV420: each frame is width * height * 3 / 2 bytes
    # read and display the next frame
    yuv420_frame = file.read(width * height * 3 // 2)
    yuv420_frame = np.frombuffer(yuv420_frame, dtype=np.uint8)
    rgb_frame = yuv420sp_to_rgb(yuv420_frame, width, height)
    cv2.imshow('RGB Image', rgb_frame)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
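A few notes on the script: it assumes the file holds raw NV12 (YUV420 semi-planar) frames, which is what the MPP decoder in section 2.2 writes out; it skips the first 23 frames and displays the 24th; and it needs opencv-python and numpy installed (for example via pip install opencv-python numpy).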
2.3.1 Decoding result
Appendix A: h.265 => YUV frames file conversion tool
During actual testing you may want to compare the conversion speed of .h264 and .h265. The C code below dumps an .h265 stream to a YUV frames file; it requires only small changes relative to the original C code from section 2.2:
I only changed the mpp_init call and the name of the .h265 input file; the two differing lines are sketched below, followed by the full listing.
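For reference, a sketch of the two differing lines; MPP_VIDEO_CodingAVC is the MPP coding type for H.264, and the .h264 input file name is assumed to match the gst-launch example:

// .h264 version (section 2.2), input file name assumed
#define __IN_FILE__ ("Tennis1080p.h264")
ret = mpp_init(ctx, MPP_CTX_DEC, MPP_VIDEO_CodingAVC);

// .h265 version (this appendix)
#define __IN_FILE__ ("Tennis1080p.h265")
ret = mpp_init(ctx, MPP_CTX_DEC, MPP_VIDEO_CodingHEVC);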
/**
 * 1. make
 * 2. ./mpp-dec-h265-to-yuv-file
 * 3. gst-launch-1.0 filesrc location=Tennis1080p.yuv ! videoparse width=1920 height=1080 format=nv12 ! videoconvert ! xvimagesink
 * 4. gst-launch-1.0 filesrc location=Tennis1080p.h264 ! h264parse ! mppvideodec ! xvimagesink
 */
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <rockchip/rk_mpi.h>

#define __IN_FILE__  ("Tennis1080p.h265")
#define __OUT_FILE__ ("Tennis1080p.yuv")

void dump_frame(MppFrame frame, FILE *out_fp)
{
    printf("dump_frame_to_file\n");
    RK_U32 width = 0;
    RK_U32 height = 0;
    RK_U32 h_stride = 0;
    RK_U32 v_stride = 0;
    MppFrameFormat fmt = MPP_FMT_YUV420SP;
    MppBuffer buffer = NULL;
    RK_U8 *base = NULL;

    width = mpp_frame_get_width(frame);
    height = mpp_frame_get_height(frame);
    h_stride = mpp_frame_get_hor_stride(frame);
    v_stride = mpp_frame_get_ver_stride(frame);
    fmt = mpp_frame_get_fmt(frame);
    buffer = mpp_frame_get_buffer(frame);
    RK_U32 buf_size = mpp_frame_get_buf_size(frame);

    printf("w x h: %dx%d hor_stride:%d ver_stride:%d buf_size:%d\n",
           width, height, h_stride, v_stride, buf_size);

    if (NULL == buffer) {
        printf("buffer is null\n");
        return;
    }

    base = (RK_U8 *)mpp_buffer_get_ptr(buffer);

    // only MPP_FMT_YUV420SP (NV12) is handled
    if (fmt != MPP_FMT_YUV420SP) {
        printf("fmt %d not supported\n", fmt);
        return;
    }

    RK_U32 i;
    RK_U8 *base_y = base;
    RK_U8 *base_c = base + h_stride * v_stride;

    // strip the stride padding: write only width bytes per row
    for (i = 0; i < height; i++, base_y += h_stride) {
        fwrite(base_y, 1, width, out_fp);
    }
    for (i = 0; i < height / 2; i++, base_c += h_stride) {
        fwrite(base_c, 1, width, out_fp);
    }
}

void dump_frame_to_file(MppCtx ctx, MppApi *mpi, MppFrame frame, FILE *out_fp)
{
    printf("decode_and_dump_to_file\n");
    MPP_RET ret;

    if (mpp_frame_get_info_change(frame)) {
        printf("mpp_frame_get_info_change\n");
        /**
         * The first decode takes this branch: buffers must be set up for the
         * decoder. The decoder supports three buffer modes, see the chapter on
         * image memory allocation and interaction modes in
         * Rockchip_Developer_Guide_MPP_CN.pdf. Pure internal mode is used here.
         */
        ret = mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
        if (ret) {
            printf("mpp_frame_get_info_change mpi->control error"
                   " MPP_DEC_SET_INFO_CHANGE_READY %d\n", ret);
        }
        return;
    }

    RK_U32 err_info = mpp_frame_get_errinfo(frame);
    RK_U32 discard = mpp_frame_get_discard(frame);
    printf("err_info: %u discard: %u\n", err_info, discard);
    if (err_info) {
        return;
    }

    // save the frame
    dump_frame(frame, out_fp);
    return;
}

int main(void)
{
    printf("---------- mpp start ----------\n");

    // 1. open the input file
    FILE *in_fp = fopen(__IN_FILE__, "rb");
    if (!in_fp) {
        printf("fopen error\n");
        return -1;
    }

    // 2. open the output file
    FILE *out_fp = fopen(__OUT_FILE__, "wb+");
    if (!out_fp) {
        printf("fopen error\n");
        return -1;
    }

    // 3. create the decoder context: MppCtx and MppApi
    MppCtx ctx = NULL;
    MppApi *mpi = NULL;
    MPP_RET ret = mpp_create(&ctx, &mpi);
    if (MPP_OK != ret) {
        printf("mpp_create error\n");
        return -1;
    }

    /**
     * 4. configure the decoder
     * - decoding from a file needs split mode
     * - blocking mode: 0 non-blocking (default), -1 blocking, +val timeout (ms)
     */
    RK_U32 need_split = -1;
    ret = mpi->control(ctx, MPP_DEC_SET_PARSER_SPLIT_MODE, (MppParam*)&need_split);
    if (MPP_OK != ret) {
        printf("mpi->control error MPP_DEC_SET_PARSER_SPLIT_MODE\n");
        return -1;
    }

    ret = mpp_init(ctx, MPP_CTX_DEC, MPP_VIDEO_CodingHEVC); // fixed to H.265, see https://blog.csdn.net/weixin_38807927/article/details/135760601
    if (MPP_OK != ret) {
        printf("mpp_init error\n");
        return -1;
    }

    // 5. initialize the packet (MppPacket)
    int buf_size = 5 * 1024 * 1024;
    char *buf = (char*)malloc(buf_size);
    if (!buf) {
        printf("malloc error\n");
        return -1;
    }
    MppPacket pkt = NULL;
    ret = mpp_packet_init(&pkt, buf, buf_size);
    if (MPP_OK != ret) {
        printf("mpp_packet_init error\n");
        return -1;
    }

    // 6. loop: read the file, feed the decoder, decode, save the result
    int over = 0;
    while (!over) {
        printf("decode...\n");
        int len = fread(buf, 1, buf_size, in_fp);
        printf("read file length:%d\n", len);
        if (0 < len) {
            mpp_packet_write(pkt, 0, buf, len);
            mpp_packet_set_pos(pkt, buf);
            mpp_packet_set_length(pkt, len);
            if (feof(in_fp) || len < buf_size) { // whole file read, set the EOS flag
                mpp_packet_set_eos(pkt);
                printf("mpp_packet_set_eos\n");
            }
        }

        /**
         * A failing decode_put_packet means the decoder's internal buffer is full.
         * In non-blocking mode, pkt_is_send tracks whether the packet currently
         * read into buf has been sent successfully.
         */
        int pkt_is_send = 0;
        while (!pkt_is_send && !over) {
            if (0 < len) {
                printf("pkt remain:%d\n", mpp_packet_get_length(pkt));
                ret = mpi->decode_put_packet(ctx, pkt);
                if (MPP_OK == ret) {
                    printf("pkt send success remain:%d\n", mpp_packet_get_length(pkt));
                    pkt_is_send = 1;
                }
            }

            MppFrame frame;
            MPP_RET ret;
            ret = mpi->decode_get_frame(ctx, &frame);
            if (MPP_OK != ret || !frame) {
                printf("decode_get_frame failed ret:%d\n", ret);
                usleep(2000); // wait 2 ms; decoding a 1080p frame usually takes about 2 ms
                continue;
            }
            printf("decode_get_frame success\n");

            dump_frame_to_file(ctx, mpi, frame, out_fp);

            if (mpp_frame_get_eos(frame)) {
                printf("mpp_frame_get_eos\n");
                mpp_frame_deinit(&frame);
                over = 1;
                continue;
            }
            mpp_frame_deinit(&frame);
        }
    }

    // 7. release resources
    fclose(in_fp);
    fclose(out_fp);
    mpi->reset(ctx);
    mpp_packet_deinit(&pkt);
    mpp_destroy(ctx);
    free(buf);

    printf("---------- mpp over ----------\n");
    return 0;
}
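To verify the dumped YUV file, the gst-launch-1.0 line from the header comment (videoparse width=1920 height=1080 format=nv12) can play it back, and the Python reader from section 2.3 can display individual frames.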
A.1 The corresponding makefile
Place this file in the same directory as the .c files and name it makefile (recipe lines must start with a tab):
app: mpp-dec-h264-to-yuv-file.c mpp-multi-thread-demo.c mpp-dec-h265-to-yuv-file.c
	gcc mpp-dec-h264-to-yuv-file.c -o mpp-dec-h264-to-yuv-file -lrockchip_mpp
	gcc mpp-dec-h265-to-yuv-file.c -o mpp-dec-h265-to-yuv-file -lrockchip_mpp
	gcc mpp-multi-thread-demo.c -o mpp-multi-thread-demo -lrockchip_mpp -lpthread -I/usr/include/glib-2.0 -I/usr/lib/aarch64-linux-gnu/glib-2.0/include -lglib-2.0
To build, simply run
make
in that directory.
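Note that this assumes the Rockchip MPP headers (rockchip/rk_mpi.h) and librockchip_mpp are already installed on the system; the multi-thread demo additionally needs the glib-2.0 development files, which is why its compile line carries the extra -I and -lglib-2.0 flags.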