Unable to call up my video files using AVFoundation
I'm having difficulty playing back a video I just recorded with a hybrid image/video camera similar to Snapchat's (e.g. tap to take a photo, press and hold to record a video, playback button to publish).
I'm currently saving the video file using NSFileManager. When I log it out, I can verify that something is being saved, but I can't inspect the file itself because it has to be tested on the phone.
The file path from my log:
file:///var/mobile/Containers/Data/Application/7D86B14D-ACFF-4494-AD61-CBBD32DCA7A5/Documents/test.mov
When I load the asset from the file manager, I log an error that the file could not be opened. I've only just started working with AVFoundation, so I'm not sure what the issues/considerations are when debugging. Any insight would be greatly appreciated, thanks!
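One quick sanity check while debugging (a minimal sketch, assuming the same outputURL helper shown in the reference code below): confirm the recording actually exists on disk and is non-empty before handing it to AVFoundation.
// Hypothetical debugging helper -- not part of the original code.
- (void)verifyRecordingOnDisk {
    NSURL *url = [self outputURL];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:url.path]) {
        NSError *error = nil;
        NSDictionary *attributes = [fileManager attributesOfItemAtPath:url.path error:&error];
        // A zero-byte file usually means the recording never finished writing.
        NSLog(@"File exists, size: %@ bytes", attributes[NSFileSize]);
    } else {
        NSLog(@"No file at %@", url.path);
    }
}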
Reference code:
PlayerView.h (reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface PlayerView : UIView
@property (nonatomic) AVPlayer *player;
- (void)setPlayer:(AVPlayer *)player;
@end
PlayerView.m (reference)
#import "PlayerView.h"
@implementation PlayerView
+ (Class)layerClass {
return [AVPlayerLayer class];
}
- (AVPlayer *)player {
return [(AVPlayerLayer *)[self layer] player];
}
- (void)setPlayer:(AVPlayer *)player {
[(AVPlayerLayer *)[self layer] setPlayer:player];
}
@end
HybridCameraViewController.h (reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "PlayerView.h"
@interface HybridCameraViewController : UIViewController
@property UIButton *button;
@property UIButton *saveButton;
@property UIImageView *previewView;
#define VIDEO_FILE @"test.mov"
@end
HybridCameraViewController.m (reference)
#import "HybridCameraViewController.h"
static const NSString *ItemStatusContext;
@class PlayerView;
@interface HybridCameraViewController () <AVCaptureFileOutputRecordingDelegate>
@end
@implementation HybridCameraViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *imageOutput;
AVCaptureMovieFileOutput *movieOutput;
AVCaptureConnection *videoConnection;
AVPlayer *player;
AVPlayerItem *playerItem;
PlayerView *playerView;
- (void)viewDidLoad {
[super viewDidLoad];
[self testDevices];
self.view.backgroundColor = [UIColor blackColor];
//Image preview
self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame];
self.previewView.backgroundColor = [UIColor whiteColor];
self.previewView.contentMode = UIViewContentModeScaleAspectFill;
self.previewView.hidden = YES;
[self.view addSubview:self.previewView];
//Playback setup
playerView = [[PlayerView alloc]initWithFrame:self.view.frame];
playerView.backgroundColor = [UIColor redColor];
[self syncUI];
//Buttons
self.button = [self createButtonWithTitle:@"REC" chooseColor:[UIColor redColor]];
UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:@selector(handleLongPressGesture:)];
[self.button addGestureRecognizer:longPressRecognizer];
[self.button addTarget:self action:@selector(captureImage) forControlEvents:UIControlEventTouchUpInside];
self.saveButton = [self createSaveButton];
[self.saveButton addTarget:self action:@selector(saveActions) forControlEvents:UIControlEventTouchUpInside];
}
- (void)viewWillAppear:(BOOL)animated {
//Tests
[self initializeAVItems];
NSLog(@"%@", videoConnection);
NSLog(@"%@", imageOutput.connections);
NSLog(@"%@", imageOutput.description.debugDescription);
}
#pragma mark - AV initialization
- (void)initializeAVItems {
//Start session, input
session = [AVCaptureSession new];
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
session.sessionPreset = AVCaptureSessionPresetHigh;
}
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSLog(@"%@", error);
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//Layer preview
CALayer *viewLayer = [[self view] layer];
[viewLayer setMasksToBounds:YES];
CGRect frame = self.view.frame;
[previewLayer setFrame:frame];
[viewLayer insertSublayer:previewLayer atIndex:0];
//Image Output
imageOutput = [AVCaptureStillImageOutput new];
NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
imageOutput.outputSettings = imageOutputSettings;
//Video Output
movieOutput = [AVCaptureMovieFileOutput new];
[session addOutput:movieOutput];
[session addOutput:imageOutput];
[session startRunning];
}
- (void)testDevices {
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
NSLog(@"Device name: %@", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(@"Device position : back");
}
else {
NSLog(@"Device position : front");
}
}
}
}
#pragma mark - Image capture
- (void)captureImage {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(@"Requesting capture from: %@", imageOutput);
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
self.previewView.image = image;
self.previewView.hidden = NO;
}
}];
[self saveButtonFlyIn:self.saveButton];
}
#pragma mark - Video capture
- (void)captureVideo {
NSLog(@"%@", movieOutput.connections);
[[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections];
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:mediaType]) {
return connection;
}
}
}
return nil;
}
#pragma mark - Show Last Recording
- (void)presentRecording {
NSLog(@"unplaying");
NSLog(@"%@",[self outputURL]);
}
- (IBAction)loadAssetFromFile {
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[self outputURL] options:nil];
NSString *tracksKey = @"tracks";
[asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler:^{
dispatch_async(dispatch_get_main_queue(),^{
NSError *error;
AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey
error:&error];
if (status == AVKeyValueStatusLoaded) {
playerItem = [AVPlayerItem playerItemWithAsset:asset];
[playerItem addObserver:self forKeyPath:@"status"
options:NSKeyValueObservingOptionInitial
context:&ItemStatusContext];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:playerItem];
player = [AVPlayer playerWithPlayerItem:playerItem];
[playerView setPlayer:player];
}
else {
NSLog(@"The asset's tracks were not loaded:\n%@", [error localizedDescription]);
}
});
}];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == &ItemStatusContext) {
dispatch_async(dispatch_get_main_queue(),^{
[self syncUI];
});
return;
}
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
- (void)playerItemDidReachEnd:(NSNotification *)notification {
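// Rewind to the beginning so a subsequent play starts from the top.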
[player seekToTime:kCMTimeZero];
}
- (void)syncUI {
if ((player.currentItem != nil) &&
([player.currentItem status] == AVPlayerItemStatusReadyToPlay)) {
self.button.enabled = YES;
}
else {
self.button.enabled = NO;
}
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (!error) {
NSLog(@"Success!!!!");
} else {
NSLog(@"Error: %@", [error localizedDescription]);
}
}
#pragma mark - Recording Destination URL
- (NSURL *)outputURL {
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
return [NSURL fileURLWithPath:filePath];
}
#pragma mark - Buttons
- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer {
if (recognizer.state == UIGestureRecognizerStateBegan) {
NSLog(@"Press");
self.button.backgroundColor = [UIColor greenColor];
[self captureVideo];
}
if (recognizer.state == UIGestureRecognizerStateEnded) {
NSLog(@"Unpress");
self.button.backgroundColor = [UIColor redColor];
[movieOutput stopRecording];
[self performSelector:@selector(loadAssetFromFile)];
[player play];
}
}
- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color {
UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.center.x, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = color;
button.tintColor = [UIColor whiteColor];
[self.view addSubview:button];
return button;
}
- (UIButton *)createSaveButton {
UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = [UIColor greenColor];
button.tintColor = [UIColor whiteColor];
button.userInteractionEnabled = YES;
[button setTitle:@"save" forState:UIControlStateNormal];
[self.view addSubview:button];
return button;
}
- (void)saveButtonFlyIn:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width - 100;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
- (void)saveButtonFlyOut:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
#pragma mark - Save actions
- (void)saveActions {
[self saveButtonFlyOut:self.saveButton];
self.previewView.image = nil;
self.previewView.hidden = YES;
}
@end
The approach is overly complex. Where you went wrong is 1) not loading the url correctly and 2) not adding the 'player layer' sublayer to the main view.
An example of successful playback looks like this:
//Test URL (self.selectedVideo here is the answerer's own test data; any valid video URL works)
self.videoURL = [NSURL URLWithString:(NSString *)[self.selectedVideo objectForKey:@"source"]];
//Video
self.player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc]initWithAsset:[AVAsset assetWithURL:self.videoURL]]];
//Player layer
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
self.playerLayer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height / 3);
[self.view.layer addSublayer:self.playerLayer];
[self.player play];
All you need to do is redirect the url to the one you just saved:
self.videoURL = [self outputURL];
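Put together, a minimal playback routine for the recorded file might look like the sketch below (assuming videoURL, player, and playerLayer are declared as properties on the view controller, as in the snippet above):
// Minimal sketch: play back the file recorded to [self outputURL].
// Assumes @property declarations for videoURL, player, and playerLayer.
- (void)playRecordedVideo {
    self.videoURL = [self outputURL];
    AVAsset *asset = [AVAsset assetWithURL:self.videoURL];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    self.player = [AVPlayer playerWithPlayerItem:item];
    // The step the original code was missing: put the player layer on screen.
    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    self.playerLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.playerLayer];
    [self.player play];
}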