如何通过USB镜像iOS屏幕?

我试图通过USB连接将iOS设备的屏幕镜像到OSX上。QuickTime可以做到这一点,我也阅读了这篇带有代码示例的文章:https://nadavrub.wordpress.com/2015/07/06/macos-media-capture-using-coremediaio/

然而,CMIOStreamCopyBufferQueue 的回调从来没有被调用过,我想知道我做错了什么?

有没有人遇到这个问题,可以提供一个工作的例子?

谢谢。

最终我按照 Nadav 在他的博客中的说明做了——发现 DAL 设备,并使用 AVCaptureSession 捕获它们的输出:

 -(id) init { // Allow iOS Devices Discovery CMIOObjectPropertyAddress prop = { kCMIOHardwarePropertyAllowScreenCaptureDevices, kCMIOObjectPropertyScopeGlobal, kCMIOObjectPropertyElementMaster }; UInt32 allow = 1; CMIOObjectSetPropertyData( kCMIOObjectSystemObject, &prop, 0, NULL, sizeof(allow), &allow ); // Get devices NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]; BOOL deviceAttahced = false; for (int i = 0; i < [devices count]; i++) { AVCaptureDevice *device = devices[i]; if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) { deviceAttahced = true; [self startSession:device]; break; } } NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; // Device not attached - subscribe to onConnect notifications if (!deviceAttahced) { id deviceWasConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification object:nil queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification *note) { AVCaptureDevice *device = note.object; [self deviceConnected:device]; }]; observers = [[NSArray alloc] initWithObjects:deviceWasConnectedObserver, nil]; } return self; } - (void) deviceConnected:(AVCaptureDevice *)device { if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) { [self startSession:device]; } } - (void) startSession:(AVCaptureDevice *)device { // Init capturing session session = [[AVCaptureSession alloc] init]; // Star session configuration [session beginConfiguration]; // Add session input NSError *error; newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; if (newVideoDeviceInput == nil) { dispatch_async(dispatch_get_main_queue(), ^(void) { NSLog(@"%@", error); }); } else { [session addInput:newVideoDeviceInput]; } // Add session output videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: 
(id)kCVPixelBufferPixelFormatTypeKey]; dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL); [videoDataOutput setSampleBufferDelegate:self queue:videoQueue]; [session addOutput:videoDataOutput]; // Finish session configuration [session commitConfiguration]; // Start the session [session startRunning]; } #pragma mark - AVCaptureAudioDataOutputSampleBufferDelegate - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { NSImage *resultNSImage = [self imageFromSampleBuffer:sampleBuffer]; /* * Here you can do whatever you need with the frame (eg convert to JPG) */ }