1. LiveVideoCoreSDK
AudioUnitRender ==> MicSource::inputCallback ==> GenericAudioMixer::pushBuffer
GenericAudioMixer::mixThread() ==> AACEncode::pushBuffer ==> Split::pushBuffer ==> AACPacketizer::pushBuffer ==> LibRtmpSessionMgr::pushBuffer
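This is the videocore-style push model: the microphone render callback feeds GenericAudioMixer::pushBuffer, and a separate mixThread() drains the queued PCM and pushes it on to the AAC encoder, decoupling capture from encoding. A minimal sketch of that pattern (IAudioOutput and SimpleAudioMixer are illustrative placeholders, not the SDK's actual classes):

#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

// Placeholder for the next stage (e.g. the AAC encoder): it only exposes pushBuffer().
struct IAudioOutput {
    virtual ~IAudioOutput() = default;
    virtual void pushBuffer(const uint8_t* data, size_t size) = 0;
};

// Minimal mixer: the capture callback pushes PCM buffers in, the mix thread
// drains the queue and forwards (here: unmodified) buffers downstream.
class SimpleAudioMixer {
public:
    explicit SimpleAudioMixer(IAudioOutput* output) : m_output(output) {
        m_thread = std::thread([this] { mixThread(); });
    }
    ~SimpleAudioMixer() {
        {
            std::lock_guard<std::mutex> l(m_mutex);
            m_exiting = true;
        }
        m_cond.notify_one();
        m_thread.join();
    }
    // Called from the audio capture callback (MicSource::inputCallback in the SDK).
    void pushBuffer(const uint8_t* data, size_t size) {
        std::lock_guard<std::mutex> l(m_mutex);
        m_queue.emplace(data, data + size);
        m_cond.notify_one();
    }

private:
    void mixThread() {
        std::unique_lock<std::mutex> l(m_mutex);
        while (!m_exiting) {
            m_cond.wait(l, [this] { return m_exiting || !m_queue.empty(); });
            while (!m_queue.empty()) {
                std::vector<uint8_t> pcm = std::move(m_queue.front());
                m_queue.pop();
                l.unlock();
                // A real mixer would resample and mix several inputs here before forwarding.
                m_output->pushBuffer(pcm.data(), pcm.size());
                l.lock();
            }
        }
    }

    IAudioOutput* m_output;
    std::thread m_thread;
    std::mutex m_mutex;
    std::condition_variable m_cond;
    std::queue<std::vector<uint8_t>> m_queue;
    bool m_exiting = false;
};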
http://www.jianshu.com/p/5b1341e97757
Server: http://www.cnblogs.com/damiao/p/5231221.html
brew install ffmpeg --with-ffplay
http://blog.csdn.net/charleslei/article/details/72462791
- (void)didProcessFrame:(CMSampleBufferRef)sampleBuffer
{
    if (mVideoFilterView) {
        // Render the preview on the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            // if (self.cameraState == VCCameraStateFront) {
            //     mVideoFilterView.mirror = YES;
            // } else {
            //     mVideoFilterView.mirror = NO;
            // }
            [mVideoFilterView renderFrame:sampleBuffer];
        });
        // Hand the raw pixel buffer to the camera source so it enters the encoding pipeline.
        CVPixelBufferRef pixelBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer);
        m_cameraSource->bufferCaptured(pixelBufferRef);
    }
    if (self.rtmpSessionState == kVCSessionStateNone) {
        self.rtmpSessionState = kVCSessionStatePreviewStarted;
    }
}
==> m_cameraSource->setOutput(m_videoSplit)
==> m_videoSplit->setOutput(m_h264Encoder);
==> m_h264Encoder->setOutput(m_h264Split);
==> m_h264Split->setOutput(m_h264Packetizer);
==> m_h264Packetizer->setOutput(m_outputSession);
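These setOutput calls chain the video pipeline: camera source → split → H.264 encoder → split → packetizer → output session, and m_outputSession is the IOSRTMPSession whose pushBuffer is shown below. A minimal sketch of how such a source/output chain composes (IOutput, ISource, PassThrough and PrintSession are illustrative placeholders, not the SDK's real headers):

#include <cstdint>
#include <cstdio>
#include <memory>

// Simplified source/output pair: every stage accepts buffers and
// forwards its result to the next stage it was wired to.
struct IOutput {
    virtual ~IOutput() = default;
    virtual void pushBuffer(const uint8_t* data, size_t size) = 0;
};

struct ISource {
    virtual ~ISource() = default;
    void setOutput(std::shared_ptr<IOutput> output) { m_output = std::move(output); }
protected:
    std::shared_ptr<IOutput> m_output;
};

// A pass-through stage is both a sink (IOutput) and a source (ISource),
// which is what lets Split/Encode/Packetize stages be chained with setOutput.
struct PassThrough : public ISource, public IOutput {
    void pushBuffer(const uint8_t* data, size_t size) override {
        // A real stage would split / encode / packetize here.
        if (m_output) m_output->pushBuffer(data, size);
    }
};

// Terminal stage standing in for the RTMP output session.
struct PrintSession : public IOutput {
    void pushBuffer(const uint8_t*, size_t size) override {
        std::printf("send %zu bytes\n", size);
    }
};

int main() {
    auto split      = std::make_shared<PassThrough>();  // m_videoSplit
    auto encoder    = std::make_shared<PassThrough>();  // m_h264Encoder
    auto packetizer = std::make_shared<PassThrough>();  // m_h264Packetizer
    auto session    = std::make_shared<PrintSession>(); // m_outputSession

    split->setOutput(encoder);
    encoder->setOutput(packetizer);
    packetizer->setOutput(session);

    uint8_t frame[4] = {0, 0, 0, 1};
    split->pushBuffer(frame, sizeof(frame)); // flows down the whole chain
    return 0;
}

Each stage only knows the next stage's pushBuffer, which is why an encoder or packetizer can be swapped by simply re-wiring setOutput.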
void IOSRTMPSession::pushBuffer(const uint8_t* const data, size_t size, IMetadata& metadata) {
    // The semaphore only guards the connected flag: drop the buffer if the session is not connected yet.
    dispatch_semaphore_wait(m_lock, DISPATCH_TIME_FOREVER);
    if (!m_iConnectedFlag) {
        dispatch_semaphore_signal(m_lock);
        return;
    }
    dispatch_semaphore_signal(m_lock);

    const IOSRTMPMetadata_t& inMetadata = static_cast<const IOSRTMPMetadata_t&>(metadata);
    unsigned int uiMsgTypeId = inMetadata.getData<kIOSRTMPMetadataMsgTypeId>();
    if ((RTMP_PT_AUDIO != uiMsgTypeId) && (RTMP_PT_VIDEO != uiMsgTypeId)) {
        return;
    }

    // Copy the packet before handing it to librtmp.
    char* pSendBuff = (char*)malloc(size);
    if (pSendBuff == NULL) {
        printf("malloc pSendBuff failed\n");
        return;
    }
    memcpy(pSendBuff, data, size);

    int dts = inMetadata.getData<kIOSRTMPMetadataTimestamp>();
    int pts = inMetadata.pts;
    int iret = 0;
    if (RTMP_PT_AUDIO == uiMsgTypeId) {
        iret = Librtmp_PutAudioBuffer(pSendBuff, (int)size, dts);
    } else if (RTMP_PT_VIDEO == uiMsgTypeId) {
        iret = Librtmp_PutVideoBuffer(pSendBuff, (int)size, pts, pts);
    }
    if (iret != 0) {
        printf("\n put buffer error.\n");
    }
    free(pSendBuff);
}
2. Push-streaming workflow
The rough flow for pushing a stream over RTMP (other protocols are similar) is as follows (a sketch of the queue handoff follows the list):
- Capture video and audio through the system camera, applying beauty effects and filters where wanted.
- The captured video frames are in YUV format and the audio frames are in PCM format.
- Hand the captured audio and video data to a serial queue (the encoding queue), where the encoding is done.
- Encode the YUV video data into H.264 frames, and the PCM audio data into AAC frames.
- Packetize the encoded H.264 and AAC data into FLV tags, place them in the encode buffer to await sending, and keep capturing and encoding further frames.
- Establish an RTMP connection to the server; once connected, create a second serial queue (the send queue).
- The RTMP protocol requires two special pieces of data in the first frames: the sps/pps and the AudioSpecificConfig.
- After the first frames have been sent, the send queue keeps pulling FLV tags out of the encode buffer and sends them to the RTMP server.
- When the broadcast ends, stop pushing and release the resources.
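A minimal sketch of the encode-queue/send-queue handoff described above, including the rule that the configuration data (sps/pps and AudioSpecificConfig) goes out first; FlvTag, FlvSender and the send* functions are placeholders, not a real RTMP implementation:

#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

// Hypothetical FLV tag produced by the encoding queue.
struct FlvTag {
    bool isVideo;
    uint32_t timestampMs;
    std::vector<uint8_t> payload;
};

class FlvSender {
public:
    void start() {
        m_thread = std::thread([this] { sendLoop(); });
    }
    void stop() {
        {
            std::lock_guard<std::mutex> l(m_mutex);
            m_running = false;
        }
        m_cond.notify_one();
        if (m_thread.joinable()) m_thread.join();
    }
    // Called by the encoding queue after each frame has been encoded and packetized.
    void enqueue(FlvTag tag) {
        std::lock_guard<std::mutex> l(m_mutex);
        m_tags.push(std::move(tag));
        m_cond.notify_one();
    }

private:
    void sendLoop() {
        // RTMP requires the configuration data first:
        // sps/pps for video and AudioSpecificConfig for audio.
        sendVideoConfig();  // placeholder: would write the AVCDecoderConfigurationRecord
        sendAudioConfig();  // placeholder: would write the AudioSpecificConfig

        std::unique_lock<std::mutex> l(m_mutex);
        while (m_running || !m_tags.empty()) {
            m_cond.wait(l, [this] { return !m_running || !m_tags.empty(); });
            while (!m_tags.empty()) {
                FlvTag tag = std::move(m_tags.front());
                m_tags.pop();
                l.unlock();
                sendTag(tag);   // placeholder: would hand the tag to librtmp
                l.lock();
            }
        }
    }

    void sendVideoConfig() {}
    void sendAudioConfig() {}
    void sendTag(const FlvTag&) {}

    std::thread m_thread;
    std::mutex m_mutex;
    std::condition_variable m_cond;
    std::queue<FlvTag> m_tags;
    bool m_running = true;
};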
https://www.jianshu.com/p/77fea6e0eccb
// Keep a retained copy of the most recent pixel buffer so it can be
// turned into a background image when the app resigns active.
@synchronized(self) {
    if (self.previousPixelBuffer) {
        CFRelease(self.previousPixelBuffer);
        self.previousPixelBuffer = nil;
    }
    self.previousPixelBuffer = CFRetain(pixelBuffer);
}
- (void)didResignActive {
    NSLog(@"resign active");
    [self setupPlayerBackgroundImage];
}

- (void)setupPlayerBackgroundImage {
    if (self.isVideoHWDecoderEnable) {
        @synchronized(self) {
            if (self.previousPixelBuffer) {
                // Snapshot the last decoded frame so the view is not black while backgrounded.
                self.image = [self getUIImageFromPixelBuffer:self.previousPixelBuffer];
                CFRelease(self.previousPixelBuffer);
                self.previousPixelBuffer = nil;
            }
        }
    }
}

- (UIImage *)getUIImageFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    UIImage *uiImage = nil;
    if (pixelBuffer) {
        CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
        uiImage = [UIImage imageWithCIImage:ciImage];
        // Redraw into a bitmap context so the CIImage is rasterized at the view's size.
        UIGraphicsBeginImageContext(self.bounds.size);
        [uiImage drawInRect:self.bounds];
        uiImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    }
    return uiImage;
}
4. std::thread
void
GLESVideoMixer::start() {
m_mixThread = std::thread([this](){ this->mixThread(); });
}
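A std::thread must be joined (or detached) before its object is destroyed, so a start() like the one above normally has a matching stop(). A self-contained sketch of that pairing (the Mixer class is illustrative, not the SDK's GLESVideoMixer):

#include <atomic>
#include <chrono>
#include <thread>

class Mixer {
public:
    void start() {
        m_exiting = false;
        m_mixThread = std::thread([this]() { this->mixThread(); });
    }
    void stop() {
        m_exiting = true;               // tell the loop to finish
        if (m_mixThread.joinable()) {
            m_mixThread.join();         // join before the thread object goes away
        }
    }

private:
    void mixThread() {
        while (!m_exiting) {
            // Real code would composite the queued video frames here.
            std::this_thread::sleep_for(std::chrono::milliseconds(33));
        }
    }

    std::atomic<bool> m_exiting{false};
    std::thread m_mixThread;
};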
https://www.github.com/runner365/LiveVideoCoreSDK/
3. ijkplayer
http://www.jianshu.com/p/a7d9ed02cf40