Video recording using AVFoundation framework
I am working on a video recording project using the AVFoundation framework. I am recording video at a resolution of 640x480, but after recording, the size of the video has changed.
What am I doing wrong? Can anyone please help me?
Try this:
[self.avcSession setSessionPreset:AVCaptureSessionPreset640x480];
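For that line to take effect, the preset has to be applied to the same session your movie file output is attached to (your variable is CaptureSession, not self.avcSession), between beginConfiguration and commitConfiguration, and only after checking that the device supports it. A minimal sketch against your existing code:
//Inside configureVideoRecorder, in place of [CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
{
    //640x480 VGA - this is the size the movie file output will record at
    [CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
}
else
{
    //Fall back if the current camera does not support the VGA preset
    NSLog(@"640x480 preset not supported, falling back to Medium");
    [CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
}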
Here is my code:
#pragma mark - Configure Video Recorder method
-(void)configureVideoRecorder{
//---------------------------------
//----- SETUP CAPTURE SESSION -----
//---------------------------------
NSLog(@"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
[CaptureSession beginConfiguration];
//----- ADD INPUTS -----
NSLog(@"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(@"Couldn't add video input");
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(@"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(@"Adding video preview layer");
[self setPreviewLayer:[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberately set this wrong to flip the image, and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(@"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check it's supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(@"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
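//NOTE: with AVCaptureSessionPresetMedium active here, the movie file output records at the Medium
//preset's dimensions (typically 480x360 on iPhone), not 640x480 - see the commented-out
//AVCaptureSessionPreset640x480 check just below for the VGA configuration.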
// if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
// [CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under our view controller's existing controls
NSLog(@"Display the preview layer");
CGRect layerRect = [[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[UIView alloc] init];
[[self view] addSubview:CameraView];
[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
[CaptureSession commitConfiguration];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
#pragma mark - CAMERA SET OUTPUT PROPERTIES method
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
//Set the video orientation (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if required)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
#pragma mark - GET CAMERA IN SPECIFIED POSITION IF IT EXISTS method
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
return Device;
}
}
return nil;
}
#pragma mark - START STOP RECORDING method
- (void)recordVideo
{
if (!WeAreRecording)
{
//----- START RECORDING -----
NSLog(@"START RECORDING");
WeAreRecording = YES;
//Create temporary URL to record to
NSString *outputPath;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
outputPath = [documentsDirectory stringByAppendingFormat:@"/firstVideo.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if required
}
}
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
else
{
//----- STOP RECORDING -----
NSLog(@"STOP RECORDING");
WeAreRecording = NO;
[audioPlayer stop];
[MovieFileOutput stopRecording];
}
}
#pragma mark - AVCaptureFileOutput didStartRecordingToOutputFileAtURL method
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
[self playSound:selectedMp3];
}
#pragma mark - DID FINISH RECORDING TO OUTPUT FILE AT URL method
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCCESSFULLY -----
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:@"Video recorded successfully" delegate:nil cancelButtonTitle:nil otherButtonTitles:@"OK", nil];
alert.tag = 1;
[alert show];
}
}
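If the recorded file still does not come out at the size you expect, it helps to check the dimensions that were actually encoded. Below is a rough, hypothetical helper (not part of the code above) that you could call with outputFileURL from the didFinish delegate; note that rotation metadata can swap width and height in players:
//Hypothetical helper - logs the pixel dimensions of the recorded movie
- (void)logRecordedVideoSize:(NSURL *)fileURL
{
    AVAsset *asset = [AVAsset assetWithURL:fileURL];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack)
    {
        //naturalSize is the encoded size before any preferredTransform rotation is applied
        CGSize size = videoTrack.naturalSize;
        NSLog(@"Recorded video size: %.0f x %.0f", size.width, size.height);
    }
}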