OpenCV + FFmpeg RTMP streaming
Capture and streaming based on OpenCV

1. Capture and decode with OpenCV: either a USB camera (through the system driver) or an RTSP camera (OpenCV calls the FFmpeg interface to decode and convert to YUV)
2. Scale and convert the pixel format with FFmpeg
3. Encode to H.264 with FFmpeg
4. Push the stream over RTMP with FFmpeg
sws_getCachedContext (pixel format conversion; reuses the context if the parameters match, otherwise it frees the previous one and creates a new one)
1. struct SwsContext *context
2. int srcW, int srcH, enum AVPixelFormat srcFormat
3. int dstW, int dstH
4. enum AVPixelFormat dstFormat   // e.g. AV_PIX_FMT_YUV420P
5. int flags   // scaling algorithm, e.g. SWS_BICUBIC
6. srcFilter, dstFilter, param

sws_scale
1. c: the conversion context
2. srcSlice (source data), srcStride (bytes per source row)
3. srcSliceY, srcSliceH (source height)
4. dst, dstStride (bytes per output row)
5. Returns the height of the output slice
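For reference, here is a minimal sketch of the two calls above, converting one packed BGR24 frame into a YUV420P AVFrame. The 1280x720 size, the function name and the frame arguments are placeholders for illustration, not values from the article; the real setup used by the project is in XMediaEncode.cpp further below.

extern "C"
{
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
}

// Sketch: convert one packed BGR24 frame into an already allocated YUV420P AVFrame.
bool BgrToYuvSketch(uint8_t *bgrBuffer, AVFrame *yuv)
{
    // create the conversion context (a real program keeps it and frees it with sws_freeContext)
    SwsContext *vsc = sws_getCachedContext(NULL,
        1280, 720, AV_PIX_FMT_BGR24,       // source size and pixel format
        1280, 720, AV_PIX_FMT_YUV420P,     // destination size and pixel format
        SWS_BICUBIC,                       // scaling algorithm
        NULL, NULL, NULL);                 // no filters, no extra parameters
    if (!vsc) return false;

    uint8_t *indata[AV_NUM_DATA_POINTERS] = { bgrBuffer };  // packed BGR: a single plane
    int insize[AV_NUM_DATA_POINTERS] = { 1280 * 3 };        // bytes per source row

    // convert the whole frame; sws_scale returns the height of the output slice
    int h = sws_scale(vsc, indata, insize, 0, 720, yuv->data, yuv->linesize);
    return h > 0;
}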
avcodec_find_encoder
1. Takes an AVCodecID, e.g. AV_CODEC_ID_H264
2. avcodec_find_encoder_by_name looks up an encoder by name instead
3. Returns an AVCodec*

avcodec_alloc_context3
1. Returns an AVCodecContext*
2. AVCodecContext *avcodec_alloc_context3(const AVCodec *codec);

avcodec_open2
int avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options);

avcodec_send_frame
1. AVCodecContext *avctx
2. const AVFrame *frame
3. The encoded output is then read back with avcodec_receive_packet
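Below is a minimal sketch of these encoder calls, written against the FFmpeg 4.x style API this article uses. The resolution, time base and the source of the raw frame are illustrative assumptions; the configuration actually used by the project is in XMediaEncode.cpp.

extern "C"
{
#include <libavcodec/avcodec.h>
}

// Sketch: open an H.264 encoder and encode one raw YUV420P frame (values are placeholders).
bool EncodeOneFrameSketch(AVFrame *yuv)
{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);   // a. find the encoder
    if (!codec) return false;

    AVCodecContext *vc = avcodec_alloc_context3(codec);        // b. create its context
    vc->width = 1280;
    vc->height = 720;
    vc->pix_fmt = AV_PIX_FMT_YUV420P;
    vc->time_base = {1, 25};

    if (avcodec_open2(vc, NULL, NULL) != 0) return false;      // c. open the encoder

    AVPacket pkt = {0};
    // d. send one raw frame, then drain every packet the encoder produced
    if (avcodec_send_frame(vc, yuv) == 0)
    {
        while (avcodec_receive_packet(vc, &pkt) == 0)
        {
            // pkt.data / pkt.size now hold compressed H.264; mux or push it here
            av_packet_unref(&pkt);
        }
    }
    avcodec_free_context(&vc);
    return true;
}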
XRtmp.h

#pragma once
#include <string>

class AVCodecContext;
class AVPacket;

class XRtmp
{
public:
    // factory method
    static XRtmp *Get(unsigned char index = 0);

    // initialize the muxer context
    virtual bool Init(const char *url) = 0;

    // add a video or audio stream
    virtual bool AddStream(const AVCodecContext *c) = 0;

    // open the rtmp network IO and send the container header
    virtual bool SendHead() = 0;

    // push one encoded frame over rtmp
    virtual bool SendFrame(AVPacket *pkt) = 0;

    virtual ~XRtmp();

protected:
    XRtmp();

private:
};
XRtmp.cpp

#include "XRtmp.h"
#include <iostream>
using namespace std;

extern "C"
{
#include <libavformat/avformat.h>
}

class CXRtmp : public XRtmp
{
public:
    void Close()
    {
        if (ic)
        {
            avformat_close_input(&ic);
            vs = NULL;
        }
        vc = NULL;
        url = "";
    }

    bool Init(const char *url)
    {
        /// 5. muxer and video stream configuration
        // a. create the output muxer context
        int ret = avformat_alloc_output_context2(&ic, 0, "flv", url);
        this->url = url;
        if (ret != 0)
        {
            char buf[1024] = {0};
            av_strerror(ret, buf, sizeof(buf) - 1);
            cout << buf;
            return false;
        }
        return true;
    }

    bool AddStream(const AVCodecContext *c)
    {
        if (!c) return false;

        // b. add the video stream
        AVStream *st = avformat_new_stream(ic, NULL);
        if (!st)
        {
            cout << "avformat_new_stream failed" << endl;
            return false;
        }
        st->codecpar->codec_tag = 0;
        // copy the parameters from the encoder
        avcodec_parameters_from_context(st->codecpar, c);
        av_dump_format(ic, 0, url.c_str(), 1);

        if (c->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            vc = c;
            vs = st;
        }
        return true;
    }

    bool SendHead()
    {
        // open the rtmp network output IO
        int ret = avio_open(&ic->pb, url.c_str(), AVIO_FLAG_WRITE);
        if (ret != 0)
        {
            char buf[1024] = {0};
            av_strerror(ret, buf, sizeof(buf) - 1);
            cout << buf << endl;
            return false;
        }

        // write the container header
        ret = avformat_write_header(ic, NULL);
        if (ret != 0)
        {
            char buf[1024] = {0};
            av_strerror(ret, buf, sizeof(buf) - 1);
            cout << buf << endl;
            return false;
        }
        return true;
    }

    bool SendFrame(AVPacket *pkt)
    {
        // rescale timestamps from the encoder time base to the stream time base, then push
        pkt->pts = av_rescale_q(pkt->pts, vc->time_base, vs->time_base);
        pkt->dts = av_rescale_q(pkt->dts, vc->time_base, vs->time_base);
        int ret = av_interleaved_write_frame(ic, pkt);
        if (ret == 0)
        {
            cout << "#" << flush;
        }
        return true;
    }

private:
    // rtmp flv muxer
    AVFormatContext *ic = NULL;
    // video encoder context
    const AVCodecContext *vc = NULL;
    AVStream *vs = NULL;
    std::string url = "";
};

// factory method
XRtmp *XRtmp::Get(unsigned char index)
{
    static CXRtmp cxr[255];
    static bool isFirst = true;
    if (isFirst)
    {
        // register all muxers
        av_register_all();
        // register all network protocols
        avformat_network_init();
        isFirst = false;
    }
    return &cxr[index];
}

XRtmp::XRtmp() {}
XRtmp::~XRtmp() {}
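A note on SendFrame: av_rescale_q converts the packet timestamps from the encoder time base to the stream time base chosen by the muxer. The encoder below uses a time base of {1, fps} (one tick per frame), while the FLV muxer normally works in milliseconds ({1, 1000}); for example, at 25 fps the 50th frame with pts = 50 becomes 50 * (1/25) / (1/1000) = 2000, i.e. two seconds into the stream. Without this rescale the server and players would interpret the timestamps on the wrong scale.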
XMediaEncode.h

#pragma once

class AVFrame;
class AVPacket;
class AVCodecContext;

// audio/video encoding interface class
class XMediaEncode
{
public:
    // input parameters
    int inWidth = 1280;
    int inHeight = 720;
    int inPixSize = 3;

    // output parameters
    int outWidth = 1280;
    int outHeight = 720;
    int bitrate = 4000000;   // bit rate of the compressed video, in bits per second
    int fps = 25;

    // factory method
    static XMediaEncode *Get(unsigned char index = 0);

    // initialize the pixel format conversion context
    virtual bool InitScale() = 0;
    virtual AVFrame *RGBToYUV(char *rgb) = 0;

    // initialize the encoder
    virtual bool InitVideoCodec() = 0;
    // encode one video frame
    virtual AVPacket *EncodeVideo(AVFrame *frame) = 0;

    virtual ~XMediaEncode();

    // encoder context
    AVCodecContext *vc = 0;

protected:
    XMediaEncode();
};
XMediaEncode.cpp

#include "XMediaEncode.h"
#include <iostream>
#include <stdexcept>
using namespace std;

extern "C"
{
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

class CXMediaEncode : public XMediaEncode
{
public:
    void close()
    {
        if (vsc)
        {
            sws_freeContext(vsc);
            vsc = NULL;
        }
        if (yuv)
        {
            av_frame_free(&yuv);
        }
        if (vc)
        {
            avcodec_free_context(&vc);
        }
        vpts = 0;
        av_packet_unref(&packet);
    }

    bool InitVideoCodec()
    {
        // 4. initialize the encoder context
        // a. find the encoder
        AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (!codec)
        {
            cout << "Can't find h264 encoder!" << endl;
            return false;
        }

        // b. create the encoder context
        vc = avcodec_alloc_context3(codec);
        if (!vc)
        {
            cout << "avcodec_alloc_context3 failed!" << endl;
            return false;
        }

        // c. configure the encoder parameters
        vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;   // put codec parameters in global extradata
        vc->codec_id = codec->id;
        vc->thread_count = 8;
        vc->bit_rate = 50 * 1024 * 8;   // bits per second of the compressed video (about 50 KB/s)
        vc->width = outWidth;
        vc->height = outHeight;
        vc->time_base = {1, fps};       // time base
        vc->framerate = {fps, 1};
        // GOP size: one key frame every gop_size frames
        vc->gop_size = 50;
        vc->max_b_frames = 0;
        vc->pix_fmt = AV_PIX_FMT_YUV420P;

        // d. open the encoder
        int ret = avcodec_open2(vc, 0, 0);
        if (ret != 0)
        {
            char buf[1024] = {0};
            av_strerror(ret, buf, sizeof(buf) - 1);
            cout << buf << endl;
            return false;
        }
        cout << "avcodec_open2 success!" << endl;
        return true;
    }

    AVPacket *EncodeVideo(AVFrame *frame)
    {
        av_packet_unref(&packet);

        // h264 encoding
        frame->pts = vpts;
        vpts++;
        int ret = avcodec_send_frame(vc, frame);
        if (ret != 0)
            return NULL;

        ret = avcodec_receive_packet(vc, &packet);
        if (ret != 0 || packet.size <= 0)
            return NULL;
        return &packet;
    }

    bool InitScale()
    {
        // 2. initialize the pixel format conversion context
        vsc = sws_getCachedContext(vsc,
            inWidth, inHeight, AV_PIX_FMT_BGR24,       // source width, height, pixel format
            outWidth, outHeight, AV_PIX_FMT_YUV420P,   // destination width, height, pixel format
            SWS_BICUBIC,                               // scaling algorithm
            0, 0, 0);
        if (!vsc)
        {
            cout << "sws_getCachedContext failed!";
            return false;
        }

        // 3. output data structure
        yuv = av_frame_alloc();
        yuv->format = AV_PIX_FMT_YUV420P;
        yuv->width = inWidth;
        yuv->height = inHeight;
        yuv->pts = 0;
        // allocate the yuv buffer
        int ret = av_frame_get_buffer(yuv, 32);
        if (ret != 0)
        {
            char buf[1024] = {0};
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw logic_error(buf);
        }
        return true;
    }

    AVFrame *RGBToYUV(char *rgb)
    {
        // rgb to yuv
        // input layout: packed bgrbgrbgr... (a planar layout would be bbbb gggg rrrr in separate planes)
        uint8_t *indata[AV_NUM_DATA_POINTERS] = {0};
        indata[0] = (uint8_t *)rgb;
        int insize[AV_NUM_DATA_POINTERS] = {0};
        // number of bytes in one row (width)
        insize[0] = inWidth * inPixSize;

        int h = sws_scale(vsc, indata, insize, 0, inHeight,   // input data
                          yuv->data, yuv->linesize);
        if (h <= 0)
        {
            return NULL;
        }
        return yuv;
    }

private:
    SwsContext *vsc = NULL;   // pixel format conversion context
    AVFrame *yuv = NULL;
    AVPacket packet = {0};
    int vpts = 0;
};

XMediaEncode *XMediaEncode::Get(unsigned char index)
{
    static bool isFirst = true;
    if (isFirst)
    {
        // register all codecs
        avcodec_register_all();
        isFirst = false;
    }
    static CXMediaEncode cxm[255];
    return &cxm[index];
}

XMediaEncode::XMediaEncode() {}
XMediaEncode::~XMediaEncode() {}
main.cpp

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <stdexcept>
#include <exception>
#include <iostream>
#include <string>

#include "XMediaEncode.h"
#include "XRtmp.h"

using namespace cv;
using namespace std;

int main(int argc, char *argv[])
{
    VideoCapture cam;
    string inUrl = "rtsp://test:test@192.168.1.4";
    // rtmp push URL of the nginx-rtmp live server
    const char *outUrl = "rtmp://0.0.0.0/live";

    // encoder and pixel format conversion
    XMediaEncode *me = XMediaEncode::Get(0);
    // muxing and streaming object
    XRtmp *xr = XRtmp::Get(0);

    Mat frame;
    namedWindow("video");
    int ret = 0;
    try
    {
        // 1. open the camera with opencv (use cam.open(inUrl) for an rtsp camera)
        //cam.open(inUrl);
        cam.open(0);
        if (!cam.isOpened())
        {
            throw logic_error("cam open failed");
        }
        cout << inUrl << " cam open success" << endl;
        int inWidth = cam.get(CAP_PROP_FRAME_WIDTH);
        int inHeight = cam.get(CAP_PROP_FRAME_HEIGHT);
        int fps = cam.get(CAP_PROP_FPS);

        // 2. initialize the pixel format conversion context
        // 3. output data structure
        me->inWidth = inWidth;
        me->inHeight = inHeight;
        me->outWidth = inWidth;
        me->outHeight = inHeight;
        me->InitScale();

        if (!me->InitVideoCodec())
        {
            throw logic_error("InitVideoCodec failed!");
        }

        // initialize the muxer context
        xr->Init(outUrl);
        // add the video (or audio) stream
        xr->AddStream(me->vc);
        xr->SendHead();

        for (;;)
        {
            // read and decode one video frame
            if (!cam.grab())
            {
                continue;
            }
            // retrieve the decoded frame (BGR)
            if (!cam.retrieve(frame))
            {
                continue;
            }

            // bgr to yuv
            me->inPixSize = frame.elemSize();
            AVFrame *yuv = me->RGBToYUV((char *)frame.data);
            if (!yuv) continue;

            // h264 encode
            AVPacket *pkt = me->EncodeVideo(yuv);
            if (!pkt) continue;

            // push over rtmp
            xr->SendFrame(pkt);
        }
    }
    catch (exception &ex)
    {
        if (cam.isOpened())
            cam.release();
        cerr << ex.what() << endl;
    }
    return 0;
}
CMakeLists.txt

cmake_minimum_required(VERSION 3.1)
project(opencv_example_project)

find_package(OpenCV REQUIRED)

message(STATUS "OpenCV library status:")
message(STATUS "    config: ${OpenCV_DIR}")
message(STATUS "    version: ${OpenCV_VERSION}")
message(STATUS "    libraries: ${OpenCV_LIBS}")
message(STATUS "    include path: ${OpenCV_INCLUDE_DIRS}")

find_library(AVCODEC_LIBRARY avcodec)
find_library(AVFORMAT_LIBRARY avformat)
find_library(AVUTIL_LIBRARY avutil)
find_library(AVDEVICE_LIBRARY avdevice)

add_executable(opencv_example main.cpp XMediaEncode.cpp XRtmp.cpp)
target_link_libraries(opencv_example PRIVATE ${OpenCV_LIBS} pthread swresample m swscale avformat avcodec avutil avfilter avdevice postproc z lzma rt)
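To try the project, assuming the OpenCV and FFmpeg development packages are installed and an nginx-rtmp server is listening at the address used for outUrl, the usual CMake workflow should be enough: create a build directory, run cmake and make there, then start the resulting opencv_example binary, and check the stream with a player such as ffplay pointed at the rtmp URL. These steps are a general sketch rather than instructions from the original article.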