Using live555 for Video Monitoring
1 Code Directory
.
├── main.cpp
├── Makefile
├── videoMonitor.cpp
├── videoMonitor.h
├── x264Encoder.cpp
└── x264Encoder.h
2 Code Structure
The structure is simple. All the work is encapsulated in class VideoMonitor, which exposes a small interface: initialization, start monitoring, and stop monitoring. Two worker threads are used in total: one reads frames from the camera, encodes them, and writes the result into a FIFO; the other starts live555 and handles the streaming.
3 Main Source Code
main.cpp
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>

#include "videoMonitor.h"

VideoMonitor VM;

int main()
{
    VM.init();

    int fun;
    while(1){
        scanf("%d", &fun);

        switch(fun){
        case 0:
            if(VM.startMonitor() == 0){
                printf("start monitor!\n");
            }
            break;
        case 1:
            if(VM.stopMonitor() == 0){
                printf("stop monitor!\n");
            }
            break;
        default:
            break;
        }
    }

    return 0;
}
The code is straightforward: it instantiates VideoMonitor and starts or stops monitoring based on input from stdin (entering 0 starts it, entering 1 stops it).
videoMonitor.h
#ifndef _VIDEO_MONITOR_H_
#define _VIDEO_MONITOR_H_

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>

//file
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

//opencv
#include <cxcore.h>
#include <highgui.h>
#include <cv.h>

//live555
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>

#include "x264Encoder.h"

#define FIFO "/tmp/fifo"

class VideoMonitor
{
public:
    VideoMonitor();
    ~VideoMonitor();
    int init();
    int startMonitor();
    int stopMonitor();
    void Destroy();

private:
    pthread_t threadID_cam;
    pthread_t threadID_live555;
    static void *thread_cam(void *arg);
    static void *thread_live555(void *arg);

    static CvCapture *cap;
    static int camHigh;
    static int camWidth;
    static RTSPServer* rtspServer;
    static void play();
    static void afterPlaying(void *);
};

#endif
videoMonitor.cpp
#include "videoMonitor.h"

using namespace cv;

int VideoMonitor::camHigh = 240;
int VideoMonitor::camWidth = 240;
CvCapture *VideoMonitor::cap = NULL;
RTSPServer* VideoMonitor::rtspServer = NULL;

UsageEnvironment *env = NULL;
char * ptr = NULL;
H264VideoStreamFramer *videoSource = NULL;
RTPSink *videoSink = NULL;

EventTriggerId DeviceSource::eventTriggerId = 0;

VideoMonitor::VideoMonitor()
{
    // make sure the "already running?" checks in startMonitor() read a defined value
    threadID_cam = 0;
    threadID_live555 = 0;
}

VideoMonitor::~VideoMonitor()
{}

int VideoMonitor::init()
{
    mkfifo(FIFO, 0777);
    camHigh = 240;
    camWidth = 320;
    return 0;
}

int VideoMonitor::startMonitor()
{
    if(threadID_cam != 0){
        printf("monitor is running !\n");
        return -1;
    }

    if(cap != NULL){
        printf("camera is running !\n");
        return -1;
    }

    cap = cvCreateCameraCapture(-1);
    if(cap == NULL){
        perror("open camera error!\n");
        return -1;
    }

    if(pthread_create(&threadID_cam, NULL, thread_cam, NULL) != 0){
        perror("create thread cam error!\n");
        return -1;
    }

    // start the live555 thread only once
    if(threadID_live555 == 0){
        if(pthread_create(&threadID_live555, NULL, thread_live555, NULL) != 0){
            perror("create thread live555 error!\n");
            return -1;
        }
    }
    return 0;
}

int VideoMonitor::stopMonitor()
{
    pthread_cancel(threadID_cam);
    pthread_join(threadID_cam, NULL);   // wait for the thread to exit before releasing the capture
    threadID_cam = 0;

    cvReleaseCapture(&cap);
    cap = NULL;

    return 0;
}

void VideoMonitor::Destroy()
{
}

void *VideoMonitor::thread_cam(void *arg)
{
    IplImage *pFrame = NULL;
    cvNamedWindow("result", 1);

    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320);
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240);

    x264Encoder x264(camWidth, camHigh, 0, 33);

    // opening the FIFO write-only blocks until live555 opens it for reading
    int fd = open(FIFO, O_WRONLY|O_CREAT, 0777);
    if(fd < 0){
        printf("open fifo file error!");
        return 0;
    }

    while(true){
        pFrame = cvQueryFrame(cap);
        if(pFrame == NULL) break;

        cvShowImage("result", pFrame);
        cvWaitKey(1);   // let HighGUI process window events so the preview updates

        Mat mat = cvarrToMat(pFrame);
        int size = x264.EncodeOneFrame(mat);
        unsigned char *data = x264.GetEncodedFrame();
        write(fd, data, size);
    }

    close(fd);
    return 0;
}

void *VideoMonitor::thread_live555(void *arg)
{
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create 'groupsocks' for RTP and RTCP:
    struct in_addr destinationAddress;
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
    // Note: This is a multicast address. If you wish instead to stream
    // using unicast, then you should use the "testOnDemandRTSPServer"
    // test program - not this test program - as a model.

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum+1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly(); // we're a SSM source
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
    OutPacketBuffer::maxSize = 600000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidth = 10000; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    RTCPInstance* rtcp
        = RTCPInstance::createNew(*env, &rtcpGroupsock,
                                  estimatedSessionBandwidth, CNAME,
                                  videoSink, NULL /* we're a server */,
                                  True /* we're a SSM source */);
    // Note: This starts RTCP running automatically

    rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }
    ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, "testStream", FIFO,
                                        "Session streamed by \"testH264VideoStreamer\"",
                                        True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    // Start the streaming:
    *env << "Beginning streaming...\n";
    play();

    env->taskScheduler().doEventLoop(); // does not return
    return 0;
}

void VideoMonitor::afterPlaying(void* /*clientData*/) {
    *env << "...done reading from file\n";
    videoSink->stopPlaying();
    Medium::close(videoSource);
    // Note that this also closes the input file that this source read from.

    // Start playing once again:
    play();
}

void VideoMonitor::play()
{
    // Open the input file as a 'byte-stream file source':
    ByteStreamFileSource* fileSource
        = ByteStreamFileSource::createNew(*env, FIFO);
    if (fileSource == NULL) {
        *env << "Unable to open file \"" << FIFO
             << "\" as a byte-stream file source\n";
        exit(1);
    }

    FramedSource* videoES = fileSource;

    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES);

    // Finally, start playing:
    *env << "Beginning to read from file...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
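
x264Encoder.h / x264Encoder.cpp are not listed in this post. As a reference, here is a minimal single-header sketch of the interface thread_cam relies on (x264Encoder, EncodeOneFrame, GetEncodedFrame). It assumes libx264 with zerolatency tuning and an OpenCV build whose cvtColor supports COLOR_BGR2YUV_I420; the interpretation of the last two constructor arguments (an unused flag and a 33 ms frame interval) and all member names are my guesses, not the repository code.

x264Encoder.h (sketch)
#ifndef _X264_ENCODER_H_
#define _X264_ENCODER_H_

#include <string.h>
#include <stdint.h>
#include <vector>
#include <opencv2/opencv.hpp>
extern "C" {
#include <x264.h>
}

class x264Encoder
{
public:
    // The third argument (0 in the article) and the fourth (interpreted here
    // as the frame interval in ms) are guesses.
    x264Encoder(int width, int height, int /*flags*/, int frameIntervalMs)
        : m_width(width), m_height(height), m_pts(0)
    {
        x264_param_t param;
        // "zerolatency" stops the encoder from buffering frames, which matters
        // when the output is streamed live through the FIFO
        x264_param_default_preset(&param, "veryfast", "zerolatency");
        param.i_width  = width;
        param.i_height = height;
        param.i_csp    = X264_CSP_I420;
        param.i_fps_num = 1000 / (frameIntervalMs > 0 ? frameIntervalMs : 33);
        param.i_fps_den = 1;
        param.b_repeat_headers = 1;  // resend SPS/PPS with every keyframe
        param.b_annexb = 1;          // Annex-B start codes, what H264VideoStreamFramer expects
        x264_param_apply_profile(&param, "baseline");
        m_encoder = x264_encoder_open(&param);
        x264_picture_alloc(&m_picIn, X264_CSP_I420, width, height);
    }

    ~x264Encoder()
    {
        x264_picture_clean(&m_picIn);
        x264_encoder_close(m_encoder);
    }

    // Encode one BGR frame; returns the number of encoded bytes (0 on failure).
    int EncodeOneFrame(const cv::Mat &bgr)
    {
        // BGR -> I420, then copy the three planes into the x264 input picture
        cv::Mat yuv;
        cv::cvtColor(bgr, yuv, cv::COLOR_BGR2YUV_I420);
        int luma = m_width * m_height;
        memcpy(m_picIn.img.plane[0], yuv.data, luma);
        memcpy(m_picIn.img.plane[1], yuv.data + luma, luma / 4);
        memcpy(m_picIn.img.plane[2], yuv.data + luma + luma / 4, luma / 4);
        m_picIn.i_pts = m_pts++;

        x264_nal_t *nals = NULL;
        int nalCount = 0;
        x264_picture_t picOut;
        int bytes = x264_encoder_encode(m_encoder, &nals, &nalCount, &m_picIn, &picOut);
        if (bytes <= 0) return 0;

        // x264 lays the NAL units out back to back starting at nals[0].p_payload
        m_outBuf.assign(nals[0].p_payload, nals[0].p_payload + bytes);
        return bytes;
    }

    // Pointer to the bytes produced by the last EncodeOneFrame() call
    unsigned char *GetEncodedFrame() { return m_outBuf.empty() ? NULL : &m_outBuf[0]; }

private:
    x264_t *m_encoder;
    x264_picture_t m_picIn;
    int m_width, m_height;
    int64_t m_pts;
    std::vector<unsigned char> m_outBuf;
};

#endif

thread_cam only needs these two calls: EncodeOneFrame() returns the byte count and GetEncodedFrame() the buffer that gets written into the FIFO, so any implementation exposing this pair will plug in.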
4 Problems Encountered
- The speed at which thread_cam captures and encodes frames directly affects playback on the receiving side; stutter and ghosting appear when it falls behind.
- I tried to terminate the thread_live555 thread, but after reading the source I found that RTSPServer's destructor is protected, so the object cannot be deleted directly; as a result, when the thread was started a second time the port was still in use and the server failed to start.
- The streamed video has a latency of about 3 seconds.
Solutions:
1. Reduce the capture resolution; with a fixed amount of hardware compute, this raises the producer's throughput.
2. After reading the source: the RTSPServer thread uses very few resources and its socket handling relies on I/O multiplexing, which is why live555 is designed so the server cannot be destructed directly (a minimal shutdown sketch follows below).
3. The roughly 3 s latency presumably comes down to hardware performance and live555's streaming path. The pipeline capture -> encode -> write FIFO -> read FIFO -> socket send -> VLC playback inevitably costs time, so some delay is hard to avoid; I'll spend time profiling it once the main work is done.
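
Regarding point 2: live555 objects derive from Medium and are released through the static Medium::close() call rather than delete, and the event loop can be told to return through a watch variable passed to doEventLoop(). A minimal sketch of how thread_live555 could be made stoppable follows; the stopEventLoop flag is my own addition, everything else reuses the env / rtspServer / scheduler pointers from videoMonitor.cpp.

// a flag that thread_live555 watches; set it to a non-zero value
// (e.g. from stopMonitor()) to make the event loop return
static char stopEventLoop = 0;

// inside thread_live555, replace the unconditional event loop with:
env->taskScheduler().doEventLoop(&stopEventLoop); // returns once the flag is set

// once the loop has returned, release the live555 objects:
Medium::close(rtspServer);   // frees the server together with its listening socket
rtspServer = NULL;
env->reclaim();              // releases the environment once all media in it are closed
env = NULL;
delete scheduler;            // the BasicTaskScheduler created at the top of the thread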
Links
The full code is posted on my personal GitHub (GitHub is unreachable for me at the moment, so I'll upload it a bit later):
https://github.com/qq930248524/live555.git
Feel free to discuss below and point out any shortcomings in the code.