结合前面的采集 v4l2 视频, 使用 live555, 通过 rtsp 发布实时流. capture.h, capture.cpp, vcompress.h, vcompress.cpp 需要参考前面几篇文章. 这里仅仅贴出 v4l2_x264_service.cpp
结合前面的采集 v4l2 视频, 使用 live555, 通过 rtsp 发布实时流. capture.h, capture.cpp, vcompress.h, vcompress.cpp 需要参考前面几篇文章. 这里仅仅贴出 v4l2_x264_service.cpp
[cpp] view plaincopy
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include "capture.h"
#include "vcompress.h"
[cpp] view plaincopy
virtual unsigned maxFrameSize() const // 这个很重要, 如果不设置, 可能导致 getNextFrame() 出现 fMaxSize 小于实际编码帧的情况, 导致图像不完整
[cpp] view plaincopy
{ return 100*1024; }
[cpp] view plaincopy
private : static void getNextFrame ( void *ptr) { ((WebcamFrameSource*)ptr)->getNextFrame1(); } void getNextFrame1 () { // capture: Picture pic; if (capture_get_picture(mp_capture, &pic) fprintf(stderr, "==== %s: capture_get_picture err\n" , __func__); m_started = 0; return ; } // compress const void *outbuf; int outlen; if (vc_compress(mp_compress, pic.data, pic.stride, &outbuf, &outlen) fprintf(stderr, "==== %s: vc_compress err\n" , __func__); m_started = 0; return ; } int64_t pts, dts; int key; vc_get_last_frame_info(mp_compress, &key, &pts, &dts); // save outbuf gettimeofday(&fPresentationTime, 0); fFrameSize = outlen; if (fFrameSize > fMaxSize) { fNumTruncatedBytes = fFrameSize - fMaxSize; fFrameSize = fMaxSize; } else { fNumTruncatedBytes = 0; } memmove(fTo, outbuf, fFrameSize); // notify afterGetting( this ); m_started = 0; } }; class WebcamOndemandMediaSubsession : public OnDemandServerMediaSubsession { public : static WebcamOndemandMediaSubsession *createNew (UsageEnvironment &env, FramedSource *source) { return new WebcamOndemandMediaSubsession(env, source); } protected : WebcamOndemandMediaSubsession (UsageEnvironment &env, FramedSource *source) : OnDemandServerMediaSubsession(env, True) // reuse the first source { fprintf(stderr, "[%d] %s .... calling\n" , gettid(), __func__); mp_source = source; mp_sdp_line = 0; } ~WebcamOndemandMediaSubsession () { fprintf(stderr, "[%d] %s .... calling\n" , gettid(), __func__); if (mp_sdp_line) free(mp_sdp_line); } private : static void afterPlayingDummy ( void *ptr) { fprintf(stderr, "[%d] %s .... calling\n" , gettid(), __func__); // ok WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession*)ptr; This->m_done = 0xff; } static void chkForAuxSDPLine ( void *ptr) { WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession *)ptr; This->chkForAuxSDPLine1(); } void chkForAuxSDPLine1 () { fprintf(stderr, "[%d] %s .... 
calling\n" , gettid(), __func__); if (mp_dummy_rtpsink->auxSDPLine()) m_done = 0xff; else { int delay = 100*1000; // 100ms nextTask() = envir().taskScheduler().scheduleDelayedTask(delay, chkForAuxSDPLine, this ); } } protected : virtual const char *getAuxSDPLine (RTPSink *sink, FramedSource *source) { fprintf(stderr, "[%d] %s .... calling\n" , gettid(), __func__); if (mp_sdp_line) return mp_sdp_line; mp_dummy_rtpsink = sink; mp_dummy_rtpsink->startPlaying(*source, 0, 0); //mp_dummy_rtpsink->startPlaying(*source, afterPlayingDummy, this); chkForAuxSDPLine( this ); m_done = 0; envir().taskScheduler().doEventLoop(&m_done); mp_sdp_line = strdup(mp_dummy_rtpsink->auxSDPLine()); mp_dummy_rtpsink->stopPlaying(); return mp_sdp_line; } virtual RTPSink *createNewRTPSink(Groupsock *rtpsock, unsigned char type, FramedSource *source) { fprintf(stderr, "[%d] %s .... calling\n" , gettid(), __func__); return H264VideoRTPSink::createNew(envir(), rtpsock, type); } virtual FramedSource *createNewStreamSource (unsigned sid, unsigned &bitrate) { fprintf(stderr, "[%d] %s .... 
calling\n" , gettid(), __func__); bitrate = 500; return H264VideoStreamFramer::createNew(envir(), new WebcamFrameSource(envir())); } private : FramedSource *mp_source; // 对应 WebcamFrameSource char *mp_sdp_line; RTPSink *mp_dummy_rtpsink; char m_done; }; static void test_task ( void *ptr) { fprintf(stderr, "test: task ....\n" ); _env->taskScheduler().scheduleDelayedTask(100000, test_task, 0); } static void test (UsageEnvironment &env) { fprintf(stderr, "test: begin...\n" ); char done = 0; int delay = 100 * 1000; env.taskScheduler().scheduleDelayedTask(delay, test_task, 0); env.taskScheduler().doEventLoop(&done); fprintf(stderr, "test: end..\n" ); } int main ( int argc, char **argv) { // env TaskScheduler *scheduler = BasicTaskScheduler::createNew(); _env = BasicUsageEnvironment::createNew(*scheduler); // test //test(*_env); // rtsp server RTSPServer *rtspServer = RTSPServer::createNew(*_env, 8554); if (!rtspServer) { fprintf(stderr, "ERR: create RTSPServer err\n" ); ::exit(-1); } // add live stream do { WebcamFrameSource *webcam_source = 0; ServerMediaSession *sms = ServerMediaSession::createNew(*_env, "webcam" , 0, "Session from /dev/video0" ); sms->addSubsession(WebcamOndemandMediaSubsession::createNew(*_env, webcam_source)); rtspServer->addServerMediaSession(sms); char *url = rtspServer->rtspURL(sms); *_env "using url \"" "\"\n" ; delete [] url; } while (0); // run loop _env->taskScheduler().doEventLoop(); return 1; }需要 live555 + libavcodec + libswscale + libx264, client 使用 vlc, mplayer, quicktime, .....
查看更多关于使用live555直播来自v4l2的摄像头图像的详细内容...
声明:本文来自网络,不代表【好得很程序员自学网】立场,转载请注明出处:http://haodehen.cn/did94832