I have an app that plays background music. When a certain button is pushed, it pushes a new view onto the navigation controller. That new view has an MPMoviePlayerController and an AVCaptureSession sharing one audio session. After that view is dismissed, the background music is much softer than it was before. What is causing the volume to be so subdued after playback?
NSError* error4 = nil;
AVAudioSession* audioSession = [AVAudioSession sharedInstance];
if (![audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&error4]) {
NSLog(#"AVAudioSession setCategory failed: %#", [error4 localizedDescription]);
}
// Set audio session property "allow mixing" to true so audio can be recorded while it is playing
UInt32 allowMixing = true;
OSStatus status = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers, sizeof(allowMixing), &allowMixing);
if (status != kAudioSessionNoError) {
NSLog(#"AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers) failed: %ld", status);
}
// Activate the audio session
error4 = nil;
if (![audioSession setActive:YES error:&error4]) {
NSLog(#"AVAudioSession setActive:YES failed: %#", [error4 localizedDescription]);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *proud = [[documentsDirectoryPath stringByAppendingPathComponent:@"imissyou"] stringByAppendingPathComponent:selectedCountry];
NSURL *movieURL = [[NSURL fileURLWithPath:proud] retain];
player = [[MPMoviePlayerController alloc] initWithContentURL:movieURL];
player.useApplicationAudioSession = YES;
[player prepareToPlay];
player.controlStyle = MPMovieControlStyleNone;
player.allowsAirPlay = NO;
player.scalingMode = MPMovieScalingModeFill;
player.view.frame = self.view.frame;
[self.view insertSubview:player.view belowSubview:vImagePreview];
[player setFullscreen:YES animated:YES];
// ...
[[NSNotificationCenter defaultCenter] addObserver:self
                                          selector:@selector(movieFinishedCallback:)
                                              name:MPMoviePlayerPlaybackDidFinishNotification
                                            object:player];
[[NSNotificationCenter defaultCenter] addObserver:self
                                          selector:@selector(exitedFullscreen:)
                                              name:MPMoviePlayerDidExitFullscreenNotification
                                            object:player];
[[NSNotificationCenter defaultCenter] addObserver:self
                                          selector:@selector(moviePlayerWillExitFullscreen:)
                                              name:MPMoviePlayerWillExitFullscreenNotification
                                            object:player];
[player play];
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[captureVideoPreviewLayer setCornerRadius:14];
[captureVideoPreviewLayer setBorderWidth:3.0];
[captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]];
[[vImagePreview layer] setCornerRadius:14];
[[vImagePreview layer] setBorderWidth:3.0];
[[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]];
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
[captureVideoPreviewLayer release];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error2 = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension];
NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
NSLog(@"%@", datestring);
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[movieURL release];
[outputURL release];
The audio session may be "ducking" the background music. You might want to set the kAudioSessionProperty_OtherMixableAudioShouldDuck property to false to disable that.
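For example, using the same C-based Audio Session API as the code above (a minimal sketch; it assumes the session category has already been set as in the question):

UInt32 shouldDuck = false;
OSStatus result = AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(shouldDuck), &shouldDuck);
if (result != kAudioSessionNoError) {
    NSLog(@"Could not disable ducking: %ld", (long)result);
}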
I got it figured out. I had the category set to PlayAndRecord. I changed it to Ambient (which also allows audio to be recorded while audio is playing) and removed the OverrideCategoryMixWithOthers property, leaving just:
OSStatus propertySetError = 0;
UInt32 allowMixing = true;
propertySetError |= AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(allowMixing), &allowMixing);
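For reference, the revised session setup would then look roughly like this (a sketch assuming the Ambient category described above, with the same error-handling style as the original code):

NSError *error = nil;
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
if (![audioSession setCategory:AVAudioSessionCategoryAmbient error:&error]) {
    NSLog(@"AVAudioSession setCategory failed: %@", [error localizedDescription]);
}
if (![audioSession setActive:YES error:&error]) {
    NSLog(@"AVAudioSession setActive:YES failed: %@", [error localizedDescription]);
}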
Related
Hi, I am trying to play video files from NSCachesDirectory, but the player is not loading them into the videoView layer. Is it possible to play video from the caches directory? Please suggest a solution. I have tried this:
NSArray *myPathList = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *cachesDir = [myPathList objectAtIndex:0];
NSString *songPath = [[NSString alloc] initWithString:[cachesDir stringByAppendingPathComponent:[NSString stringWithFormat:@"Song.%@", downloadURL]]];
NSURL *pathurl = [[NSURL alloc] initFileURLWithPath:songPath];
NSLog(#"--pathurl---%#",pathurl);
avPlayer =[AVPlayer playerWithURL:pathurl];
self.avPlayerLayer =[AVPlayerLayer playerLayerWithPlayer:avPlayer];
if ( !([avPlayer status] == AVPlayerStatusReadyToPlay) && (dataReceivedSoFar.length >10000))
{
avPlayerLayer.frame =CGRectMake(0, 0, 320, 450);
[[self.videoView layer] addSublayer:avPlayerLayer];
[avPlayer play];
NSLog(#"Video IS Playing");
[alert dismissWithClickedButtonIndex:0 animated:YES];
}
KVO approach.
Add observer:
self.player = [AVPlayer playerWithURL:url];
[self.player addObserver:self forKeyPath:@"status" options:0 context:&PlayerStatusContext];
Observing value
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
change:(NSDictionary *)change context:(void *)context {
if (context == &PlayerStatusContext) {
AVPlayer *thePlayer = (AVPlayer *)object;
if ([thePlayer status] == AVPlayerStatusFailed) {
NSError *error = [self.player error];
// Respond to error: for example, display an alert sheet.
return;
}
// Deal with other status changes if appropriate.
if ([thePlayer status] == AVPlayerStatusReadyToPlay) {
[self play];
}
}
// Deal with other change notifications if appropriate.
[super observeValueForKeyPath:keyPath ofObject:object
change:change context:context];
return;
}
Playing:
- (IBAction)play:sender {
[self.player play];
}
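One detail the snippets above leave out: remove the observer when you are done with the player, or KVO will try to message a deallocated observer. A minimal sketch (assuming manual reference counting, as used elsewhere in this thread, and iOS 5+ for the context-taking variant):

- (void)dealloc {
    [self.player removeObserver:self forKeyPath:@"status" context:&PlayerStatusContext];
    [super dealloc];
}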
EDIT:
As awolCZ said, AVPlayer cannot play partially downloaded files, but you could use something like http://test.com/test.mp3 as the URL.
I'm not sure that [avPlayer status] == AVPlayerStatusReadyToPlay returns YES in this case; it takes some time for AVPlayer to load the item. You can try to play immediately, but it is better to track AVPlayer.status using KVO.
Lukas
EDIT: Try it this way (quick win):
NSArray *myPathList = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *cachesDir = [myPathList objectAtIndex:0];
NSString *songPath = [[NSString alloc] initWithString:[cachesDir stringByAppendingPathComponent:[NSString stringWithFormat:@"Song.%@", downloadURL]]];
NSURL *pathurl = [[NSURL alloc] initFileURLWithPath:songPath];
NSLog(#"--pathurl---%#",pathurl);
avPlayer =[AVPlayer playerWithURL:pathurl];
self.avPlayerLayer =[AVPlayerLayer playerLayerWithPlayer:avPlayer];
avPlayerLayer.frame =CGRectMake(0, 0, 320, 450);
[[self.videoView layer] addSublayer:avPlayerLayer];
[avPlayer play];
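If it still does not play, it is worth verifying that the file actually exists and has finished downloading before handing it to AVPlayer. A quick check (using the songPath built above):

if (![[NSFileManager defaultManager] fileExistsAtPath:songPath]) {
    NSLog(@"No file at %@ - it may not have finished downloading yet", songPath);
}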
I am using an AVCaptureSession to record video and, at the same time, STT (the Google Speech-to-Text API) to convert voice into text. I am facing a problem: when I tap the speak button, the camera freezes. Any correct answer will be appreciated. Thanks in advance.
To start the camera, in -(void)viewDidLoad:
if ([[self captureManager] setupSession]) {
// Create video preview layer and add it to the UI
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:[[self captureManager] session]];
UIView *view = [self videoPreviewView];
CALayer *viewLayer = [view layer];
[viewLayer setMasksToBounds:YES];
CGRect bounds = [view bounds];
[newCaptureVideoPreviewLayer setFrame:bounds];
if ([newCaptureVideoPreviewLayer isOrientationSupported]) {
// Orientation takes a single value, not a bitmask of orientations.
[newCaptureVideoPreviewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
}
[newCaptureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[viewLayer insertSublayer:newCaptureVideoPreviewLayer below:[[viewLayer sublayers] objectAtIndex:0]];
[self setCaptureVideoPreviewLayer:newCaptureVideoPreviewLayer];
[newCaptureVideoPreviewLayer release];
// Start the session. This is done asynchronously since -startRunning doesn't return until the session is running.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[[[self captureManager] session] startRunning];
});
[self updateButtonStates];
}
- (BOOL) setupSession
{
BOOL success = NO;
// Set torch and flash mode to auto
if ([[self backFacingCamera] hasFlash]) {
if ([[self backFacingCamera] lockForConfiguration:nil]) {
if ([[self backFacingCamera] isFlashModeSupported:AVCaptureFlashModeAuto]) {
[[self backFacingCamera] setFlashMode:AVCaptureFlashModeAuto];
}
[[self backFacingCamera] unlockForConfiguration];
}
}
if ([[self backFacingCamera] hasTorch]) {
if ([[self backFacingCamera] lockForConfiguration:nil]) {
if ([[self backFacingCamera] isTorchModeSupported:AVCaptureTorchModeAuto]) {
[[self backFacingCamera] setTorchMode:AVCaptureTorchModeAuto];
}
[[self backFacingCamera] unlockForConfiguration];
}
}
// Init the device inputs
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontFacingCamera] error:nil];
AVCaptureDeviceInput *newAudioInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self audioDevice] error:nil];
// Setup the still image file output
AVCaptureStillImageOutput *newStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey,
nil];
[newStillImageOutput setOutputSettings:outputSettings];
[outputSettings release];
// Create session (use default AVCaptureSessionPresetHigh)
AVCaptureSession *newCaptureSession = [[AVCaptureSession alloc] init];
// Add inputs and output to the capture session
if ([newCaptureSession canAddInput:newVideoInput]) {
[newCaptureSession addInput:newVideoInput];
}
if ([newCaptureSession canAddInput:newAudioInput]) {
[newCaptureSession addInput:newAudioInput];
}
if ([newCaptureSession canAddOutput:newStillImageOutput]) {
[newCaptureSession addOutput:newStillImageOutput];
}
[self setStillImageOutput:newStillImageOutput];
[self setVideoInput:newVideoInput];
[self setAudioInput:newAudioInput];
[self setSession:newCaptureSession];
[newStillImageOutput release];
[newVideoInput release];
[newAudioInput release];
[newCaptureSession release];
// Set up the movie file output
NSURL *outputFileURL = [self tempFileURL];
AVCamRecorder *newRecorder = [[AVCamRecorder alloc] initWithSession:[self session] outputFileURL:outputFileURL];
[newRecorder setDelegate:self];
// Send an error to the delegate if video recording is unavailable
if (![newRecorder recordsVideo] && [newRecorder recordsAudio]) {
NSString *localizedDescription = NSLocalizedString(@"Video recording unavailable", @"Video recording unavailable description");
NSString *localizedFailureReason = NSLocalizedString(@"Movies recorded on this device will only contain audio. They will be accessible through iTunes file sharing.", @"Video recording unavailable failure reason");
NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
localizedDescription, NSLocalizedDescriptionKey,
localizedFailureReason, NSLocalizedFailureReasonErrorKey,
nil];
NSError *noVideoError = [NSError errorWithDomain:@"AVCam" code:0 userInfo:errorDict];
if ([[self delegate] respondsToSelector:@selector(captureManager:didFailWithError:)]) {
[[self delegate] captureManager:self didFailWithError:noVideoError];
}
}
[self setRecorder:newRecorder];
[newRecorder release];
success = YES;
return success;
}
I have created a streaming video app, and now I want to record the video playing in the MPMoviePlayer, along with its sound. I have tried this code, but it records only a black screen and no sound. I am adding the MPMoviePlayer instance dynamically, so please help me do this; any source code or library would be a great help.
Try this :
.h file:
#import <MediaPlayer/MediaPlayer.h>
NSString *strSelectedVideoPath;
MPMoviePlayerController *player;
@property (nonatomic, retain) NSString *strSelectedVideoPath;
@property (nonatomic, retain) MPMoviePlayerController *player;
.m file
-(void) viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
player = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL URLWithString:strSelectedVideoPath]];
player.scalingMode = MPMovieScalingModeFill;
player.movieSourceType = MPMovieSourceTypeFile;
player.view.frame = CGRectMake(0, 45, 320, 400);
player.shouldAutoplay = YES;
[player prepareToPlay];
[self.view addSubview:player.view];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(movieFinishedCallback:) name:MPMoviePlayerPlaybackDidFinishNotification object:player];
[player play];
}
- (void) movieFinishedCallback:(NSNotification*) aNotification {
MPMoviePlayerController *player1 = [aNotification object];
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerPlaybackDidFinishNotification object:player1];
[player1.view removeFromSuperview];
player1 = nil;
}
You have to add MediaPlayer.framework
For Record video :
-(void) imagePickerController: (UIImagePickerController *) picker didFinishPickingMediaWithInfo: (NSDictionary *) info {
NSString *mediaType = [info objectForKey: UIImagePickerControllerMediaType];
[self dismissModalViewControllerAnimated:NO];
if (CFStringCompare ((CFStringRef) mediaType, kUTTypeMovie, 0)
== kCFCompareEqualTo) {
NSString *moviePath = [[info objectForKey: UIImagePickerControllerMediaURL] path];
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum (moviePath)) {
UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
}
}
- (void)video:(NSString*)videoPath didFinishSavingWithError:(NSError*)error contextInfo:(void*)contextInfo {
if (error) {
// Handle the save error appropriately.
}
else {
NSURL *videoURl = [NSURL fileURLWithPath:videoPath];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURl options:nil];
AVAssetImageGenerator *generate = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generate.appliesPreferredTrackTransform = YES;
NSError *err = NULL;
//For Thumb Image
CMTime time = CMTimeMake(1, 60);
CGImageRef imgRef = [generate copyCGImageAtTime:time actualTime:NULL error:&err];
self.strUploadVideoPath = videoPath;
CGImageRelease(imgRef);
[generate release];
[asset release];
}
}
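The delegate methods above assume the picker has already been presented and configured for movie capture. A sketch of that step (the method name startVideoRecording is just an illustration; kUTTypeMovie requires MobileCoreServices):

#import <MobileCoreServices/MobileCoreServices.h> // at the top of the .m file, for kUTTypeMovie

- (void)startVideoRecording {
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        return; // no camera available on this device
    }
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    picker.mediaTypes = [NSArray arrayWithObject:(NSString *)kUTTypeMovie];
    picker.delegate = self;
    [self presentModalViewController:picker animated:YES];
    [picker release];
}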
If I just have the video added as an input, the preview layer works fine and the video records fine, but if I try to add audio, the preview layer freezes and the video file is corrupted. What could be causing this?
-(void) record {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:@"Test"] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
-(AVCaptureDevice *)frontFacingCameraIfAvailable
{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
I want to make a twin screen using the built-in camera on iOS. I tried the following code, but it shows just one view. That is the natural result, I know. Here is the code I used:
- (void)prepareCameraView:(UIView *)window
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc]
initWithSession:session];
captureVideoPreviewLayer.frame = window.bounds;
[window.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
How can I get a double screen on iOS?
Use this code:
AVCaptureSession *session = [AVCaptureSession new];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ( [session canAddInput:deviceInput])
{
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[previewLayer setFrame:CGRectMake(0.0, 0.0, self.view.bounds.size.width, self.view.bounds.size.height)];
NSUInteger replicatorInstances = 2;
CGFloat replicatorViewHeight = (self.view.bounds.size.height - 64)/replicatorInstances;
CAReplicatorLayer *replicatorLayer = [CAReplicatorLayer layer];
replicatorLayer.frame = CGRectMake(0, 0.0, self.view.bounds.size.width, replicatorViewHeight);
replicatorLayer.instanceCount = replicatorInstances;
replicatorLayer.instanceTransform = CATransform3DMakeTranslation(0.0, replicatorViewHeight, 0.0);
[replicatorLayer addSublayer:previewLayer];
[self.view.layer addSublayer:replicatorLayer];
[session startRunning];
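Note that CAReplicatorLayer simply mirrors the one preview layer, so both copies show the same camera feed; it does not create a second capture session.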
Try this:
- (void)prepareCameraView:(UIView *)window
{
NSArray *captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = CGRectMake(0.0f, 0.0f, window.bounds.size.width/2.0f, window.bounds.size.height);
[window.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:[captureDevices objectAtIndex:0] error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = CGRectMake(window.bounds.size.width/2.0f, 0.0f, window.bounds.size.width/2.0f, window.bounds.size.height);
[window.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:[captureDevices objectAtIndex:1] error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
}
Note that it makes absolutely no checks that there are actually two cameras, and it splits the view vertically, so it is probably best viewed in landscape. You'll want to add some checks to that code and work out exactly how you want to lay out each camera's layer before using it.