iPhone - Can a capture session in AVCam have multiple camera inputs at a time?
I'm using the AVFoundation framework to record video, and I'm trying to use both the front and back cameras at the same time. When the front camera is working, the back camera freezes on its first frame, and vice versa. Can a capture session have multiple camera inputs at a time? Here is my code; can anyone help me, please?
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.view.frame = CGRectMake(0, 0, 320, 568);
    self.view.backgroundColor = [UIColor clearColor];

    activeIndicatorView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 320, 250)];
    activeIndicatorView.backgroundColor = [UIColor clearColor];
    [self.view addSubview:activeIndicatorView];

    activeIndicatorViewBack = [[UIView alloc] initWithFrame:CGRectMake(0, 290, 320, 250)];
    activeIndicatorViewBack.backgroundColor = [UIColor greenColor];
    [self.view addSubview:activeIndicatorViewBack];

    tracksArray = [[NSMutableArray alloc] initWithCapacity:2];
    [self startRecording];
}

- (void)startRecording
{
    [self startCameraTwo]; // front camera
    [self startCamera];    // back camera
}

- (void)startCamera
{
    // NSLog(@"Setting up capture session");
    captureSessionBack = [[AVCaptureSession alloc] init];

    // Add video input
    AVCaptureDevice *videoDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // videoDeviceBack = [self frontCamera];
    if (videoDeviceBack)
    {
        NSError *error;
        videoInputDeviceBack = [AVCaptureDeviceInput deviceInputWithDevice:videoDeviceBack error:&error];
        if (!error)
        {
            if ([captureSessionBack canAddInput:videoInputDeviceBack])
                [captureSessionBack addInput:videoInputDeviceBack];
            else
                NSLog(@"Couldn't add video input");
        }
        else
        {
            // NSLog(@"Couldn't create video input");
        }
    }

    // Add audio input
    AVCaptureDevice *audioCaptureDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInputBack = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDeviceBack error:&error];
    if (audioInputBack)
    {
        [captureSessionBack addInput:audioInputBack];
    }

    previewBack = [self videoPreviewWithFrameBack:activeIndicatorViewBack.bounds];
    previewBack.backgroundColor = [UIColor grayColor];
    [activeIndicatorViewBack addSubview:previewBack];
    // [preview addSubview:vie];

    movieFileOutputBack = [[AVCaptureMovieFileOutput alloc] init];
    movieFileOutputBack.minFreeDiskSpaceLimit = 1024 * 1024; // << Minimum free space in bytes for recording to continue on a volume
    if ([captureSessionBack canAddOutput:movieFileOutputBack])
        [captureSessionBack addOutput:movieFileOutputBack];

    // Set connection properties (output properties)
    [self cameraSetOutputPropertiesBack]; // (also called after switching cameras)

    // NSLog(@"Setting image quality");
    if ([captureSessionBack canSetSessionPreset:AVCaptureSessionPreset640x480])
    {
        // Check that size-based configurations are supported before setting them
        [captureSessionBack setSessionPreset:AVCaptureSessionPreset640x480];
    }

    if ([videoDeviceBack isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [videoDeviceBack lockForConfiguration:&error])
    {
        [videoDeviceBack setFocusMode:AVCaptureFocusModeAutoFocus];
        if ([videoDeviceBack isFocusPointOfInterestSupported])
            [videoDeviceBack setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
        [videoDeviceBack unlockForConfiguration];
    }

    [captureSessionBack startRunning];
}

- (void)startCameraTwo
{
    // NSLog(@"Setting up capture session");
    captureSession = [[AVCaptureSession alloc] init];

    // Add video input
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    videoDevice = [self frontCamera];
    if (videoDevice)
    {
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (!error)
        {
            if ([captureSession canAddInput:videoInputDevice])
                [captureSession addInput:videoInputDevice];
            else
                NSLog(@"Couldn't add video input");
        }
        else
        {
            // NSLog(@"Couldn't create video input");
        }
    }

    // Add audio input
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInput)
    {
        [captureSession addInput:audioInput];
    }

    preview = [self videoPreviewWithFrame:activeIndicatorView.bounds];
    activeIndicatorView.backgroundColor = [UIColor redColor];
    [activeIndicatorView addSubview:preview];

    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; // << Minimum free space in bytes for recording to continue on a volume
    if ([captureSession canAddOutput:movieFileOutput])
        [captureSession addOutput:movieFileOutput];

    // Set connection properties (output properties)
    [self cameraSetOutputProperties]; // (also called after switching cameras)

    // NSLog(@"Setting image quality");
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
    {
        // Check that size-based configurations are supported before setting them
        [captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    }

    if ([videoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [videoDevice lockForConfiguration:&error])
    {
        [videoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
        if ([videoDevice isFocusPointOfInterestSupported])
            [videoDevice setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
        [videoDevice unlockForConfiguration];
    }

    [captureSession startRunning];
}

- (void)cameraSetOutputProperties
{
    // Set connection properties (output properties)
    captureConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];

    // Set landscape orientation (if required)
    if ([captureConnection isVideoOrientationSupported])
    {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
        [captureConnection setVideoOrientation:orientation];
    }

    // Set frame rate (if required)
    CMTimeShow(captureConnection.videoMinFrameDuration);
    CMTimeShow(captureConnection.videoMaxFrameDuration);
    if (captureConnection.supportsVideoMinFrameDuration)
        captureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (captureConnection.supportsVideoMaxFrameDuration)
        captureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    CMTimeShow(captureConnection.videoMinFrameDuration);
    CMTimeShow(captureConnection.videoMaxFrameDuration);
}

- (void)cameraSetOutputPropertiesBack
{
    // Same as above, for the back-camera session
    captureConnectionBack = [movieFileOutputBack connectionWithMediaType:AVMediaTypeVideo];

    // Set landscape orientation (if required)
    if ([captureConnectionBack isVideoOrientationSupported])
    {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
        [captureConnectionBack setVideoOrientation:orientation];
    }

    // Set frame rate (if required)
    CMTimeShow(captureConnectionBack.videoMinFrameDuration);
    CMTimeShow(captureConnectionBack.videoMaxFrameDuration);
    if (captureConnectionBack.supportsVideoMinFrameDuration)
        captureConnectionBack.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (captureConnectionBack.supportsVideoMaxFrameDuration)
        captureConnectionBack.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    CMTimeShow(captureConnectionBack.videoMinFrameDuration);
    CMTimeShow(captureConnectionBack.videoMaxFrameDuration);
}

- (AVCaptureDevice *)frontCamera
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionFront)
        {
            return device;
        }
    }
    return nil;
}
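(Not from the original post.) One way to see what is actually happening is to observe AVFoundation's session notifications: when the second AVCaptureSession starts running, the first one is typically interrupted because the camera hardware cannot serve both, which is why one preview freezes on its first frame. A minimal diagnostic sketch, assuming the captureSession / captureSessionBack ivars above; observeSession:named: is a hypothetical helper name:

// Diagnostic only (not in the original code): log when a session is
// interrupted or hits a runtime error. Call once per session, e.g.
//   [self observeSession:captureSession named:@"front"];
//   [self observeSession:captureSessionBack named:@"back"];
- (void)observeSession:(AVCaptureSession *)session named:(NSString *)name
{
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];

    // Fires when the system pauses the session (for example because another
    // session has taken over the camera hardware).
    [center addObserverForName:AVCaptureSessionWasInterruptedNotification
                        object:session
                         queue:[NSOperationQueue mainQueue]
                    usingBlock:^(NSNotification *note) {
                        NSLog(@"%@ session was interrupted: %@", name, note.userInfo);
                    }];

    // Fires when the session fails outright.
    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                        object:session
                         queue:[NSOperationQueue mainQueue]
                    usingBlock:^(NSNotification *note) {
                        NSLog(@"%@ session runtime error: %@",
                              name, note.userInfo[AVCaptureSessionErrorKey]);
                    }];
}

The observer tokens returned by addObserverForName:object:queue:usingBlock: are ignored here for brevity; a real implementation would keep them and remove the observers when the sessions are torn down.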
Currently it is not possible to use the front and rear-facing cameras at the same time; only one camera can feed a running capture session.
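Since only one camera can capture at a time, the usual workaround is to keep a single session and swap its video input when the user toggles cameras, rather than running two sessions. The sketch below is not from the original answer; it assumes the captureSession ivar and the frontCamera helper from the question, and switchToDevice: is a hypothetical method name:

// Hypothetical helper (not in the original post): swap the session's video
// input to a different camera while keeping the same outputs and preview.
- (void)switchToDevice:(AVCaptureDevice *)newDevice
{
    if (!newDevice) {
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice
                                                                           error:&error];
    if (!newInput) {
        NSLog(@"Couldn't create input for %@: %@", newDevice.localizedName, error);
        return;
    }

    [captureSession beginConfiguration];

    // Remove the existing video input(s); iterate over a copy because the
    // session's input list is being mutated inside the loop.
    for (AVCaptureDeviceInput *input in [captureSession.inputs copy]) {
        if ([input.device hasMediaType:AVMediaTypeVideo]) {
            [captureSession removeInput:input];
        }
    }

    if ([captureSession canAddInput:newInput]) {
        [captureSession addInput:newInput];
    }

    [captureSession commitConfiguration];
}

After a swap like this, the connection-level settings would need to be reapplied (the question's cameraSetOutputProperties method, whose comment already notes that it is called after changing cameras).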