Need to display the video's image in MPMoviePlayerController in Swift

I already asked this question from my colleague's profile, but there is no solution yet. Please help me solve it; I have been at this for more than 4 hours.

What I have done so far:

I pick a video from the gallery and display it on screen with an MPMoviePlayerController. I select two videos, one after the other. When I select the first video it displays fine, but as soon as I load the second video, the first player's screen turns black. I need that first player to keep showing its video's image. How can I do this? Please help me.

My code:

    import UIKit
    import AVFoundation
    import MobileCoreServices
    import AssetsLibrary
    import MediaPlayer
    import CoreMedia

    class ViewController: UIViewController, UIGestureRecognizerDelegate {

        var Asset1: AVAsset?
        var Asset2: AVAsset?
        var Asset3: AVAsset?
        var audioAsset: AVAsset?
        var loadingAssetOne = false

        // placeholder thumbnail image shown once an audio track is loaded
        @IBOutlet weak var musicImg: UIImageView!

        var videoPlayer = MPMoviePlayerController()
        var mediaUI = UIImagePickerController()
        var videoURL = NSURL()

        override func viewDidLoad() {
            super.viewDidLoad()
            musicImg.hidden = true
            // the selector string must match handleTap(_:) below
            let gestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTap:")
            gestureRecognizer.delegate = self
            videoPlayer.view.addGestureRecognizer(gestureRecognizer)
        }

        func handleTap(gestureRecognizer: UIGestureRecognizer) {
            //location = gestureRecognizer.locationInView(videoPlayer.view)
            print("tapped")
        }

        // MARK: - gesture delegate: this allows you to dispatch touches

        func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldReceiveTouch touch: UITouch) -> Bool {
            return true
        }

        func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWithGestureRecognizer otherGestureRecognizer: UIGestureRecognizer) -> Bool {
            return true
        }

        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }

        func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate: protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {
            if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
                return false
            }
            let mediaUI = UIImagePickerController()
            mediaUI.sourceType = .SavedPhotosAlbum
            mediaUI.mediaTypes = [kUTTypeMovie as String]
            mediaUI.allowsEditing = true
            mediaUI.delegate = delegate
            presentViewController(mediaUI, animated: true, completion: nil)
            return true
        }

        // After merging the videos and audio, the final video is saved to the
        // gallery and also shown as a preview.
        func exportDidFinish(session: AVAssetExportSession) {
            if session.status == AVAssetExportSessionStatus.Completed {
                let outputURL = session.outputURL
                let library = ALAssetsLibrary()
                if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                    library.writeVideoAtPathToSavedPhotosAlbum(outputURL, completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in
                        if error != nil {
                            print("something went wrong while saving")
                        } else {
                            // use the output URL to display the final video on screen
                            self.videoURL = outputURL!
                            self.mediaUI.dismissViewControllerAnimated(true, completion: nil)
                            self.videoPlayer = MPMoviePlayerController()
                            self.videoPlayer.contentURL = self.videoURL
                            self.videoPlayer.controlStyle = .Embedded
                            self.videoPlayer.scalingMode = .AspectFill
                            self.videoPlayer.shouldAutoplay = true
                            self.videoPlayer.backgroundView.backgroundColor = UIColor.clearColor()
                            self.videoPlayer.fullscreen = true
                            self.videoPlayer.view.frame = CGRectMake(38, 442, 220, 106)
                            self.view.addSubview(self.videoPlayer.view)
                            self.videoPlayer.prepareToPlay()
                            self.videoPlayer.play()
                        }
                    })
                }
            }
            Asset1 = nil
            Asset2 = nil
            Asset3 = nil
            audioAsset = nil
        }

        // pick the first video
        @IBAction func FirstVideo(sender: AnyObject) {
            loadingAssetOne = true
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }

        // pick the second video
        @IBAction func SecondVideo(sender: AnyObject) {
            loadingAssetOne = false
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }

        // pick the audio
        @IBAction func Audio(sender: AnyObject) {
            let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
            mediaPickerController.delegate = self
            mediaPickerController.prompt = "Select Audio"
            presentViewController(mediaPickerController, animated: true, completion: nil)
        }

        @IBAction func playPreview(sender: AnyObject) {
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }

        // orientation for the video
        func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
            var assetOrientation = UIImageOrientation.Up
            var isPortrait = false
            if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                assetOrientation = .Right
                isPortrait = true
            } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                assetOrientation = .Left
                isPortrait = true
            } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                assetOrientation = .Up
            } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                assetOrientation = .Down
            }
            return (assetOrientation, isPortrait)
        }

        func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
            let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
            let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
            let transform = assetTrack.preferredTransform
            let assetInfo = orientationFromTransform(transform)
            var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
            if assetInfo.isPortrait {
                scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
                let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
                instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), atTime: kCMTimeZero)
            } else {
                let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
                var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
                if assetInfo.orientation == .Down {
                    let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
                    let windowBounds = UIScreen.mainScreen().bounds
                    let yFix = assetTrack.naturalSize.height + windowBounds.height
                    let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
                    concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
                }
                instruction.setTransform(concat, atTime: kCMTimeZero)
            }
            return instruction
        }

        // merge all files
        @IBAction func MergeAll(sender: AnyObject) {
            if let firstAsset = Asset1, secondAsset = Asset2 {
                let mixComposition = AVMutableComposition()

                // load the first video
                let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                do {
                    try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
                } catch _ {
                }

                // load the second video
                let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                do {
                    try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: firstAsset.duration)
                } catch _ {
                }

                let mainInstruction = AVMutableVideoCompositionInstruction()
                mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
                let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
                firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
                let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
                mainInstruction.layerInstructions = [firstInstruction, secondInstruction]

                let mainComposition = AVMutableVideoComposition()
                mainComposition.instructions = [mainInstruction]
                mainComposition.frameDuration = CMTimeMake(1, 30)
                mainComposition.renderSize = CGSize(width: UIScreen.mainScreen().bounds.width, height: UIScreen.mainScreen().bounds.height)

                // load the audio
                if let loadedAudioAsset = audioAsset {
                    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
                    do {
                        try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)), ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0], atTime: kCMTimeZero)
                    } catch _ {
                    }
                }

                // save the final video to the gallery
                let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
                let dateFormatter = NSDateFormatter()
                dateFormatter.dateStyle = .LongStyle
                dateFormatter.timeStyle = .ShortStyle
                let date = dateFormatter.stringFromDate(NSDate())
                // let savePath = documentDirectory.URLByAppendingPathComponent("mergeVideo-\(date).mov")
                let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("final-\(date).mov")
                let url = NSURL(fileURLWithPath: savePath)

                let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
                exporter!.outputURL = url
                exporter!.outputFileType = AVFileTypeQuickTimeMovie
                exporter!.shouldOptimizeForNetworkUse = true
                exporter!.videoComposition = mainComposition
                exporter!.exportAsynchronouslyWithCompletionHandler() {
                    dispatch_async(dispatch_get_main_queue(), { () -> Void in
                        self.exportDidFinish(exporter!)
                    })
                }
            }
        }
    }

    extension ViewController: UIImagePickerControllerDelegate {

        // display the first & second video after it is picked from the gallery
        func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
            let mediaType = info[UIImagePickerControllerMediaType] as! NSString
            dismissViewControllerAnimated(true, completion: nil)
            if mediaType == kUTTypeMovie {
                let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
                if loadingAssetOne {
                    if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                        self.videoURL = vURL
                    } else {
                        print("oops, no url")
                    }
                    mediaUI.dismissViewControllerAnimated(true, completion: nil)
                    self.videoPlayer = MPMoviePlayerController()
                    self.videoPlayer.contentURL = videoURL
                    self.videoPlayer.view.frame = CGRectMake(38, 57, 220, 106)
                    self.view.addSubview(self.videoPlayer.view)
                    self.videoPlayer.controlStyle = .Embedded
                    self.videoPlayer.scalingMode = .AspectFill
                    self.videoPlayer.shouldAutoplay = true
                    self.videoPlayer.prepareToPlay()
                    self.videoPlayer.play()
                    Asset1 = avAsset
                } else {
                    if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                        self.videoURL = vURL
                    } else {
                        print("oops, no url")
                    }
                    mediaUI.dismissViewControllerAnimated(true, completion: nil)
                    self.videoPlayer = MPMoviePlayerController()
                    self.videoPlayer.contentURL = videoURL
                    self.videoPlayer.view.frame = CGRectMake(38, 206, 220, 106)
                    self.view.addSubview(self.videoPlayer.view)
                    self.videoPlayer.controlStyle = .Embedded
                    self.videoPlayer.scalingMode = .AspectFill
                    self.videoPlayer.shouldAutoplay = true
                    self.videoPlayer.prepareToPlay()
                    self.videoPlayer.play()
                    Asset2 = avAsset
                }
            }
        }
    }

    extension ViewController: UINavigationControllerDelegate {
    }

    extension ViewController: MPMediaPickerControllerDelegate {

        func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
            let selectedSongs = mediaItemCollection.items
            if selectedSongs.count > 0 {
                let song = selectedSongs[0]
                if let vURL = song.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL {
                    audioAsset = AVAsset(URL: vURL)
                    dismissViewControllerAnimated(true, completion: nil)
                    mediaUI.dismissViewControllerAnimated(true, completion: nil)
                    musicImg.hidden = false
                    let alert = UIAlertController(title: "yes", message: "Audio Loaded", preferredStyle: .Alert)
                    alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler: nil))
                    presentViewController(alert, animated: true, completion: nil)
                } else {
                    dismissViewControllerAnimated(true, completion: nil)
                    let alert = UIAlertController(title: "No audio", message: "Audio Not Loaded", preferredStyle: .Alert)
                    alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler: nil))
                    presentViewController(alert, animated: true, completion: nil)
                }
            } else {
                dismissViewControllerAnimated(true, completion: nil)
            }
        }

        func mediaPickerDidCancel(mediaPicker: MPMediaPickerController) {
            dismissViewControllerAnimated(true, completion: nil)
        }
    }

I need to display the image of the selected video on the screen. Please help me solve this, thank you.

I followed the tutorial linked here.

If you want to play multiple videos in the same view, you can use AVFoundation's AVPlayer. I have read up on this on Stack Overflow, in the Apple documentation, and in other forums. They all say it is not possible with MPMoviePlayerViewController, but that it can be done with AVPlayer.
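
Before the full listing, here is the core idea as a minimal sketch (my condensation, not from the original answer, written in the same Swift 2 syntax as the rest of the post; the helper name, container views, and URLs are placeholders). Each video gets its own AVPlayer and AVPlayerLayer, so both can render at the same time:

    import UIKit
    import AVFoundation

    // Minimal sketch: one AVPlayer + AVPlayerLayer per video.
    // `embedPlayerForURL` is a hypothetical helper; in the full code below the
    // URLs come from UIImagePickerController and the containers from the storyboard.
    func embedPlayerForURL(url: NSURL, inView container: UIView) -> AVPlayer {
        let player = AVPlayer(URL: url)
        let layer = AVPlayerLayer(player: player)
        layer.frame = container.bounds
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill
        container.layer.addSublayer(layer)
        player.play()
        return player // keep a strong reference, or the player is deallocated and the layer goes blank
    }

    // let playerOne = embedPlayerForURL(firstVideoURL, inView: videoView)
    // let playerTwo = embedPlayerForURL(secondVideoURL, inView: videoViewTwo)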

    import UIKit
    import AVFoundation
    import MobileCoreServices
    import AssetsLibrary
    import MediaPlayer
    import CoreMedia

    class ViewController: UIViewController, UIGestureRecognizerDelegate {

        var optionalInteger: Int?
        var Asset1: AVAsset?
        var Asset2: AVAsset?
        var Asset3: AVAsset?
        var audioAsset: AVAsset?
        var loadingAssetOne = false

        @IBOutlet weak var musicImg: UIImageView!
        @IBOutlet var videoView: UIView!
        @IBOutlet var videoViewTwo: UIView!
        // thumbnail placeholders (declared here so the code compiles;
        // they were referenced but not declared in the original answer)
        @IBOutlet weak var imageViewVideoOne: UIImageView!
        @IBOutlet weak var imageViewVideoTwo: UIImageView!

        var mediaUI = UIImagePickerController()
        var videoURL = NSURL()

        // one player/item/layer set per video, so loading the second video
        // does not tear down the first one
        var player: AVPlayer? = nil
        var playerLayer: AVPlayerLayer? = nil
        var asset: AVAsset? = nil
        var playerItem: AVPlayerItem? = nil
        var playerTwo: AVPlayer? = nil
        var playerLayerTwo: AVPlayerLayer? = nil
        var assetTwo: AVAsset? = nil
        var playerItemTwo: AVPlayerItem? = nil

        override func viewDidLoad() {
            super.viewDidLoad()
            musicImg.hidden = true
            let gestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTap:")
            gestureRecognizer.delegate = self
            videoView.addGestureRecognizer(gestureRecognizer)
            imageViewVideoOne.hidden = true
            imageViewVideoTwo.hidden = true
        }

        func handleTap(gestureRecognizer: UIGestureRecognizer) {
            print("tapped")
        }

        func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate: protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {
            if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
                return false
            }
            let mediaUI = UIImagePickerController()
            mediaUI.sourceType = .SavedPhotosAlbum
            mediaUI.mediaTypes = [kUTTypeMovie as String]
            mediaUI.allowsEditing = true
            mediaUI.delegate = delegate
            presentViewController(mediaUI, animated: true, completion: nil)
            return true
        }

        // pick the first video
        @IBAction func FirstVideo(sender: AnyObject) {
            loadingAssetOne = true
            optionalInteger = 0
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }

        // pick the second video
        @IBAction func SecondVideo(sender: AnyObject) {
            loadingAssetOne = false
            optionalInteger = 1
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }

        // pick the audio
        @IBAction func Audio(sender: AnyObject) {
            let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
            mediaPickerController.delegate = self
            mediaPickerController.prompt = "Select Audio"
            presentViewController(mediaPickerController, animated: true, completion: nil)
        }

        @IBAction func playPreview(sender: AnyObject) {
            startMediaBrowserFromViewController(self, usingDelegate: self)
        }
    }

    extension ViewController: UIImagePickerControllerDelegate {

        // display the first & second video after it is picked from the gallery
        func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
            let mediaType = info[UIImagePickerControllerMediaType] as! NSString
            dismissViewControllerAnimated(true, completion: nil)
            if mediaType == kUTTypeMovie {
                if loadingAssetOne {
                    let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
                    print(avAsset)
                    if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                        self.videoURL = vURL
                    } else {
                        print("oops, no url")
                    }
                    mediaUI.dismissViewControllerAnimated(true, completion: nil)
                    asset = AVAsset(URL: videoURL)
                    playerItem = AVPlayerItem(asset: asset!)
                    player = AVPlayer(playerItem: self.playerItem!)
                    playerLayer = AVPlayerLayer(player: self.player)
                    videoView.frame = CGRectMake(38, 57, 220, 106)
                    playerLayer?.frame = videoView.bounds // sublayer coordinates, not the superview's
                    videoView.layer.addSublayer(self.playerLayer!)
                    player!.play()
                } else {
                    let avAssetTwo = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
                    print(avAssetTwo)
                    if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                        self.videoURL = vURL
                    } else {
                        print("oops, no url")
                    }
                    mediaUI.dismissViewControllerAnimated(true, completion: nil)
                    // the second video gets its own player and its own container
                    // view, so the first one keeps rendering
                    assetTwo = AVAsset(URL: videoURL)
                    playerItemTwo = AVPlayerItem(asset: assetTwo!)
                    playerTwo = AVPlayer(playerItem: self.playerItemTwo!)
                    playerLayerTwo = AVPlayerLayer(player: self.playerTwo)
                    videoViewTwo.frame = CGRectMake(38, 206, 220, 106)
                    playerLayerTwo?.frame = videoViewTwo.bounds
                    videoViewTwo.layer.addSublayer(self.playerLayerTwo!)
                    playerTwo!.play()
                }
            }
        }
    }

    // the UINavigationControllerDelegate and MPMediaPickerControllerDelegate
    // extensions are the same as in the question's code above
    extension ViewController: UINavigationControllerDelegate {
    }

Sources for your question:

Multiple videos on the same screen

Multiple video playback on iOS

Apple's MPMoviePlayerViewController documentation

Playing multiple videos at the same time

Apple's AVPlayer documentation

Playing multiple videos

Playback

Playing video in iOS