FFmpegAVMuxer is a muxer based on the FFmpeg library that muxes video and audio streams into media files. Currently only MP4/FMP4 output is supported, and only the video stream; support for more file types will be added in the future.
API Instructions
```cpp
namespace com {
namespace sunplus {
namespace media {

class FFmpegAVMuxer {
public:
    FFmpegAVMuxer(bool isFMP4 = false);
    ~FFmpegAVMuxer();

public:
    int init(std::string filepath, AVCodecContext* videoCodecCtx);
    int init(std::string filepath, VideoStreamParam_t param, uint8_t* spspps, int spsppsSize);
    void uninit();
    int putVideoFrame(AVPacket*& packet);
    int flush();
};

}}} // namespace com::sunplus::media
```
Constructors
```cpp
FFmpegAVMuxer(bool isFMP4 = false);
```
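A minimal construction sketch. The header name `FFmpegAVMuxer.h` is an assumption (the document does not state it); the `isFMP4` flag appears to select fragmented MP4 (fMP4) output, since the sample below passes `true` and names the output file `ffmpeg_fmp4_muxer_test.mp4`.

```cpp
#include <memory>
#include "FFmpegAVMuxer.h"   // assumed header name

using namespace com::sunplus::media;

int main() {
    // Default: regular (non-fragmented) MP4 output.
    auto mp4Muxer  = std::make_shared<FFmpegAVMuxer>();
    // true: fragmented MP4 (fMP4) output, as used in the sample below.
    auto fmp4Muxer = std::make_shared<FFmpegAVMuxer>(true);
    return 0;
}
```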
init
Initialize the muxer, set the stream parameters, and write the header to the file.
```cpp
/**
 * Initialize the muxer.
 *
 * @param filepath       the file to write.
 * @param videoCodecCtx  codec context used to initialize the stream.
 *
 * @return 0 if OK, < 0 on error.
 */
int init(std::string filepath, AVCodecContext* videoCodecCtx);

/**
 * Initialize the muxer.
 *
 * @param filepath    the file to write.
 * @param param       video stream parameters used to initialize the stream.
 * @param spspps      SPS/PPS data used to initialize the stream.
 * @param spsppsSize  size of the SPS/PPS data in bytes.
 *
 * @return 0 if OK, < 0 on error.
 */
int init(std::string filepath, VideoStreamParam_t param, uint8_t* spspps, int spsppsSize);
```
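A hedged sketch of the second overload, assuming the SPS/PPS blob is taken from an H.264 encoder's extradata (which FFmpeg fills when the encoder is opened with AV_CODEC_FLAG_GLOBAL_HEADER) and that `VideoStreamParam_t` has the fields used by the sample below. `initMuxerFromEncoder` is a hypothetical helper, not part of the library.

```cpp
#include <string>
#include "FFmpegAVMuxer.h"   // assumed header name
extern "C" {
#include <libavcodec/avcodec.h>
}

using namespace com::sunplus::media;

// Hypothetical helper: fill VideoStreamParam_t from an encoder context and
// pass its SPS/PPS (extradata) to the second init() overload.
int initMuxerFromEncoder(FFmpegAVMuxer& muxer, AVCodecContext* encCtx,
                         const std::string& filepath, int fps)
{
    VideoStreamParam_t param;
    param.width     = encCtx->width;
    param.height    = encCtx->height;
    param.pix_fmt   = encCtx->pix_fmt;
    param.time_base = encCtx->time_base;
    param.gop       = encCtx->gop_size;
    param.bitrate   = encCtx->bit_rate;
    param.fps       = fps;
    // For H.264, extradata holds the SPS/PPS when the encoder was opened with
    // AV_CODEC_FLAG_GLOBAL_HEADER.
    return muxer.init(filepath, param, encCtx->extradata, encCtx->extradata_size);
}
```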
uninit
Release all resources allocated by the init method and close the file.
```cpp
/**
 * Release all resources allocated by the init method.
 */
void uninit();
```
putVideoFrame
Write the video packet to the muxer.
```cpp
/**
 * Write the packet to the muxer.
 *
 * @param packet  the encoded video packet to write.
 *
 * @return 0 if OK, < 0 on error.
 */
int putVideoFrame(AVPacket*& packet);
```
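A minimal write-loop sketch. `getEncodedPacket()` is a hypothetical placeholder for whatever produces encoded AVPackets (the full sample below uses FFmpegH264Provider::getFrame); per that sample, the caller still owns the packet after putVideoFrame() and releases it with av_packet_free().

```cpp
#include "FFmpegAVMuxer.h"   // assumed header name
extern "C" {
#include <libavcodec/avcodec.h>
}

// Hypothetical packet source, e.g. an encoder wrapper. Returns 0 on success.
int getEncodedPacket(AVPacket*& packet);

void writeAllPackets(com::sunplus::media::FFmpegAVMuxer& muxer)
{
    AVPacket* packet = nullptr;
    while (getEncodedPacket(packet) == 0 && packet != nullptr) {
        if (muxer.putVideoFrame(packet) < 0) {
            av_packet_free(&packet);   // stop on write error
            break;
        }
        av_packet_free(&packet);       // caller releases the packet (as in the sample)
        packet = nullptr;
    }
}
```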
flush
Flush the data buffered within the muxer and write the stream trailer to the output media file.
It must be called once there are no more frames to write.
```cpp
/**
 * Flush data buffered within the muxer and write the stream trailer to the output media file.
 *
 * @return 0 if OK, < 0 on error.
 */
int flush();
```
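A short shutdown sketch mirroring the end of the sample below: flush() is called first, while the output file is still open, and uninit() afterwards to release resources and close the file. The helper name is hypothetical.

```cpp
#include "FFmpegAVMuxer.h"   // assumed header name

// End a recording: write buffered data and the trailer, then release resources.
void finishRecording(com::sunplus::media::FFmpegAVMuxer& muxer)
{
    muxer.flush();    // write buffered data and the stream trailer
    muxer.uninit();   // close the file and free everything allocated by init()
}
```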
Sample Code
This sample shows how to record an MP4 file using FFmpegH264Provider and FFmpegAVMuxer.
The MP4 recording flow:

create YUV source --> create H264 provider --> prepare the H264 provider --> create the MP4 muxer --> init the muxer --> create a thread that gets frames from the H264 provider and puts them to the muxer --> flush and uninit the muxer
```cpp
void FFmpegMP4Muxer_Test(const char* path)
{
    // Open the V4L2 YUV source.
    auto videoSource = make_shared<FFmpegV4L2VideoSource>("/dev/video0");
    AVDictionary* options = nullptr;
    av_dict_set(&options, "video_size", "1280x720", 0);
    av_dict_set(&options, "framerate", "30", 0);
    av_dict_set(&options, "pixel_format", "uyvy422", 0);
    int ret = videoSource->open(options);

    // Create and prepare the H264 provider from the source stream parameters.
    auto h264Provider = make_shared<FFmpegH264Provider>(videoSource);
    VideoStreamParam_t param;
    param.width = videoSource->getAVStream()->codecpar->width;
    param.height = videoSource->getAVStream()->codecpar->height;
    param.pix_fmt = (enum AVPixelFormat)videoSource->getAVStream()->codecpar->format;
    param.time_base = videoSource->getAVStream()->time_base;
    param.gop = 30;
    param.bitrate = 1000000;
    param.fps = 30;

    // true -> fragmented MP4 (fMP4) output.
    auto mp4Muxer = make_shared<FFmpegAVMuxer>(true);

    ret = h264Provider->prepare(param);

    string filepath = "ffmpeg_fmp4_muxer_test.mp4";
    if (path != nullptr) {
        filepath = string(path);
    }

    // Pull encoded frames from the provider and feed them to the muxer.
    auto muxMp4ThreadFunc = [&]() {
        int index = 0;
        bool initMuxer = false;
        while (!is_exit) {   // is_exit is set by the sample framework on exit.
            AVPacket* packet = nullptr;
            auto ret = h264Provider->getFrame(packet);
            if (ret < 0 || packet == nullptr) {
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
                continue;
            }
            printf("muxMp4Thread, get h264 frame[%d] pts: %lld, size: %d, isKeyFrame: %d\n",
                   index, (long long)packet->pts, packet->size,
                   packet->flags & AV_PKT_FLAG_KEY);
            if (!initMuxer) {
                // Initialize the muxer on the first frame, once the encoder context is available.
                ret = mp4Muxer->init(filepath, h264Provider->getAVCodecContext());
                initMuxer = true;
            }
            ret = mp4Muxer->putVideoFrame(packet);
            av_packet_unref(packet);
            av_packet_free(&packet);
            index++;
        }
        // No more frames: write the trailer and release the muxer.
        mp4Muxer->flush();
        mp4Muxer->uninit();
    };

    auto thread = make_shared<std::thread>(muxMp4ThreadFunc);

    _wait_exit("FFmpegMP4Muxer_Test");   // blocks until the user requests exit.
    thread->join();                      // wait for the muxing thread to finish.

    h264Provider->destroy();
    videoSource->close();
}
```
Test Result
```bash
./ffmpeg_sample mp4record [filepath]
```