How to use AVSampleBufferDisplayLayer in iOS 8 for an RTP H.264 stream with GStreamer?

After learning that iOS 8 gives programmers access to the HW-H264-Decoder, I want to use it right away. There is a good introduction to direct access to video encoding and decoding from WWDC 2014; you can take a look here.

Based on case #1, I started developing an application that should be able to receive an H.264 RTP/UDP stream from GStreamer, sink it into an "appsink" element to get direct access to the NAL units, convert those into CMSampleBuffers, and then display them with my AVSampleBufferDisplayLayer.

The interesting piece of code that does all of the above:

//
//  GStreamerBackend.m
//

#import "GStreamerBackend.h"

NSString * const naluTypesStrings[] = {
    @"Unspecified (non-VCL)",
    @"Coded slice of a non-IDR picture (VCL)",
    @"Coded slice data partition A (VCL)",
    @"Coded slice data partition B (VCL)",
    @"Coded slice data partition C (VCL)",
    @"Coded slice of an IDR picture (VCL)",
    @"Supplemental enhancement information (SEI) (non-VCL)",
    @"Sequence parameter set (non-VCL)",
    @"Picture parameter set (non-VCL)",
    @"Access unit delimiter (non-VCL)",
    @"End of sequence (non-VCL)",
    @"End of stream (non-VCL)",
    @"Filler data (non-VCL)",
    @"Sequence parameter set extension (non-VCL)",
    @"Prefix NAL unit (non-VCL)",
    @"Subset sequence parameter set (non-VCL)",
    @"Reserved (non-VCL)",
    @"Reserved (non-VCL)",
    @"Reserved (non-VCL)",
    @"Coded slice of an auxiliary coded picture without partitioning (non-VCL)",
    @"Coded slice extension (non-VCL)",
    @"Coded slice extension for depth view components (non-VCL)",
    @"Reserved (non-VCL)",
    @"Reserved (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
    @"Unspecified (non-VCL)",
};

static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data)
{
    GStreamerBackend *backend = (__bridge GStreamerBackend *)(user_data);
    GstSample *sample = gst_app_sink_pull_sample(sink);
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMemory *memory = gst_buffer_get_all_memory(buffer);

    GstMapInfo info;
    gst_memory_map (memory, &info, GST_MAP_READ);

    int startCodeIndex = 0;
    for (int i = 0; i < 5; i++)
    {
        if (info.data[i] == 0x01)
        {
            startCodeIndex = i;
            break;
        }
    }
    int nalu_type = ((uint8_t)info.data[startCodeIndex + 1] & 0x1F);
    NSLog(@"NALU with Type \"%@\" received.", naluTypesStrings[nalu_type]);

    if(backend.searchForSPSAndPPS)
    {
        if (nalu_type == 7)
            backend.spsData = [NSData dataWithBytes:&(info.data[startCodeIndex + 1]) length: info.size - 4];

        if (nalu_type == 8)
            backend.ppsData = [NSData dataWithBytes:&(info.data[startCodeIndex + 1]) length: info.size - 4];

        if (backend.spsData != nil && backend.ppsData != nil)
        {
            const uint8_t* const parameterSetPointers[2] = { (const uint8_t*)[backend.spsData bytes], (const uint8_t*)[backend.ppsData bytes] };
            const size_t parameterSetSizes[2] = { [backend.spsData length], [backend.ppsData length] };

            CMVideoFormatDescriptionRef videoFormatDescr;
            OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, parameterSetPointers, parameterSetSizes, 4, &videoFormatDescr);
            [backend setVideoFormatDescr:videoFormatDescr];
            [backend setSearchForSPSAndPPS:false];
            NSLog(@"Found all data for CMVideoFormatDescription. Creation: %@.", (status == noErr) ? @"successfully." : @"failed.");
        }
    }

    if (nalu_type == 1 || nalu_type == 5)
    {
        CMBlockBufferRef videoBlock = NULL;
        OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, info.data, info.size, kCFAllocatorNull, NULL, 0, info.size, 0, &videoBlock);
        NSLog(@"BlockBufferCreation: %@", (status == kCMBlockBufferNoErr) ? @"successfully." : @"failed.");

        const uint8_t sourceBytes[] = {(uint8_t)(info.size >> 24), (uint8_t)(info.size >> 16), (uint8_t)(info.size >> 8), (uint8_t)info.size};
        status = CMBlockBufferReplaceDataBytes(sourceBytes, videoBlock, 0, 4);
        NSLog(@"BlockBufferReplace: %@", (status == kCMBlockBufferNoErr) ? @"successfully." : @"failed.");

        CMSampleBufferRef sbRef = NULL;
        const size_t sampleSizeArray[] = {info.size};

        status = CMSampleBufferCreate(kCFAllocatorDefault, videoBlock, true, NULL, NULL, backend.videoFormatDescr, 1, 0, NULL, 1, sampleSizeArray, &sbRef);
        NSLog(@"SampleBufferCreate: %@", (status == noErr) ? @"successfully." : @"failed.");

        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sbRef, YES);
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);

        NSLog(@"Error: %@, Status:%@", backend.displayLayer.error, (backend.displayLayer.status == AVQueuedSampleBufferRenderingStatusUnknown)?@"unknown":((backend.displayLayer.status == AVQueuedSampleBufferRenderingStatusRendering)?@"rendering":@"failed"));

        dispatch_async(dispatch_get_main_queue(),^{
            [backend.displayLayer enqueueSampleBuffer:sbRef];
            [backend.displayLayer setNeedsDisplay];
        });
    }

    gst_memory_unmap(memory, &info);
    gst_memory_unref(memory);
    gst_buffer_unref(buffer);

    return GST_FLOW_OK;
}

@implementation GStreamerBackend

- (instancetype)init
{
    if (self = [super init])
    {
        self.searchForSPSAndPPS = true;
        self.ppsData = nil;
        self.spsData = nil;

        self.displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
        self.displayLayer.bounds = CGRectMake(0, 0, 300, 300);
        self.displayLayer.backgroundColor = [UIColor blackColor].CGColor;
        self.displayLayer.position = CGPointMake(500, 500);

        self.queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
        dispatch_async(self.queue, ^{
            [self app_function];
        });
    }
    return self;
}

- (void)start
{
    if(gst_element_set_state(self.pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
    {
        NSLog(@"Failed to set pipeline to playing");
    }
}

- (void)app_function
{
    GstElement *udpsrc, *rtphdepay, *capsfilter;
    GMainContext *context; /* GLib context used to run the main loop */
    GMainLoop *main_loop;  /* GLib main loop */

    context = g_main_context_new ();
    g_main_context_push_thread_default(context);

    g_set_application_name ("appsink");

    self.pipeline = gst_pipeline_new ("testpipe");

    udpsrc = gst_element_factory_make ("udpsrc", "udpsrc");
    GstCaps *caps = gst_caps_new_simple("application/x-rtp",
                                        "media", G_TYPE_STRING, "video",
                                        "clock-rate", G_TYPE_INT, 90000,
                                        "encoding-name", G_TYPE_STRING, "H264", NULL);
    g_object_set(udpsrc, "caps", caps, "port", 5000, NULL);
    gst_caps_unref(caps);

    rtphdepay = gst_element_factory_make("rtph264depay", "rtph264depay");

    capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
    caps = gst_caps_new_simple("video/x-h264",
                               "streamformat", G_TYPE_STRING, "byte-stream",
                               "alignment", G_TYPE_STRING, "nal", NULL);
    g_object_set(capsfilter, "caps", caps, NULL);

    self.appsink = gst_element_factory_make ("appsink", "appsink");

    gst_bin_add_many (GST_BIN (self.pipeline), udpsrc, rtphdepay, capsfilter, self.appsink, NULL);

    if(!gst_element_link_many (udpsrc, rtphdepay, capsfilter, self.appsink, NULL)) {
        NSLog(@"Cannot link gstreamer elements");
        exit (1);
    }

    if(gst_element_set_state(self.pipeline, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS)
        NSLog(@"could not change to ready");

    GstAppSinkCallbacks callbacks = { NULL, NULL, new_sample, NULL, NULL};
    gst_app_sink_set_callbacks (GST_APP_SINK(self.appsink), &callbacks, (__bridge gpointer)(self), NULL);

    main_loop = g_main_loop_new (context, FALSE);
    g_main_loop_run (main_loop);

    /* Free resources */
    g_main_loop_unref (main_loop);
    main_loop = NULL;
    g_main_context_pop_thread_default(context);
    g_main_context_unref (context);
    gst_element_set_state (GST_ELEMENT (self.pipeline), GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (self.pipeline));
}

@end
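For context, this is roughly how I drive the backend. The post does not include the view-controller side, so the ViewController class and its backend property below are assumptions; it is just a minimal sketch of attaching the displayLayer to a view and starting the pipeline:

#import <UIKit/UIKit.h>
#import "GStreamerBackend.h"

@interface ViewController : UIViewController
@property (nonatomic, strong) GStreamerBackend *backend;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Keep a strong reference so the pipeline thread stays alive.
    self.backend = [[GStreamerBackend alloc] init];

    // The AVSampleBufferDisplayLayer created in -init only renders once it
    // is part of a layer tree, so attach it to this view controller's view.
    [self.view.layer addSublayer:self.backend.displayLayer];

    // Sets the GStreamer pipeline to PLAYING.
    [self.backend start];
}

@end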

What I get when running the app and starting to stream to the iOS device:

 NALU with Type "Sequence parameter set (non-VCL)" received.
 NALU with Type "Picture parameter set (non-VCL)" received.
 Found all data for CMVideoFormatDescription. Creation: successfully..
 NALU with Type "Coded slice of an IDR picture (VCL)" received.
 BlockBufferCreation: successfully.
 BlockBufferReplace: successfully.
 SampleBufferCreate: successfully.
 Error: (null), Status:unknown
 NALU with Type "Coded slice of a non-IDR picture (VCL)" received.
 BlockBufferCreation: successfully.
 BlockBufferReplace: successfully.
 SampleBufferCreate: successfully.
 Error: (null), Status:rendering
 [...] (repetition of the last 5 lines)

So it seems to decode as it should, but my problem is that I cannot see anything in my AVSampleBufferDisplayLayer. It might be a problem with kCMSampleAttachmentKey_DisplayImmediately, but I have set it as I was told here (see the "important" note).

Every idea is welcome ;)

Update: Got it working now. The length of each NALU does not include the length header itself, so I subtract 4 from my info.size before using it for my sourceBytes.
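In code terms, the update above amounts to writing info.size - 4 (the NAL payload without the 4-byte start code) into the length prefix instead of the full buffer size. A minimal sketch of the adjusted lines from new_sample (info, videoBlock and status are the variables already defined there; dataLength is just a local helper name I introduce here):

    // Adjusted length prefix, per the update above: subtract the 4-byte
    // start code so the AVCC-style header carries only the NAL payload length.
    size_t dataLength = info.size - 4;
    const uint8_t sourceBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
                                   (uint8_t)(dataLength >> 8),  (uint8_t)dataLength};
    status = CMBlockBufferReplaceDataBytes(sourceBytes, videoBlock, 0, 4);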

Following your code, I wrote a program that uses AVSampleBufferDisplayLayer to decode and display a live H.264 stream. I use live555 instead of GStreamer to receive the H.264 NAL units.

Unfortunately, my app only displays a few frames and then no further images are shown. Did your app run into the same problem?