
iPhone SDK: Accessing the camera?

I want to know how to access the iPhone's camera and work with it in real time: for example, drawing over the live camera feed.

A related question:

Can I display four camera views at once, as in Photo Booth on the Mac?


I'm assuming you want to apply effects to each view, as on the Mac. Not sure that would even work on anything but the iPhone 4, given the GPU power needed to pull it off.

Answer


You can do this with AVFoundation; besides AVFoundation itself, the code below uses the CoreMedia, CoreVideo, and QuartzCore frameworks.
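The snippet assumes a view controller that retains the capture session and the four CALayers and adopts the sample-buffer delegate protocol. A minimal interface sketch (the class name is hypothetical; the property names match the code below):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

/* Hypothetical controller interface; only the members the code uses. */
@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) CALayer *customLayer1;
@property (nonatomic, retain) CALayer *customLayer2;
@property (nonatomic, retain) CALayer *customLayer3;

@end

With that in place, the capture setup and the frame callback: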

- (void)initCapture { 

    /* Capture input from the default video camera. */
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput 
              deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] 
              error:nil]; 

    /* Video data output that hands raw frames to a delegate.
       Autoreleased here; the session retains it below. */
    AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease]; 

    /* Drop frames that arrive while the delegate is still busy. */
    captureOutput.alwaysDiscardsLateVideoFrames = YES; 

    /* Deliver sample buffers on a dedicated serial queue. */
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL); 
    [captureOutput setSampleBufferDelegate:self queue:queue]; 
    dispatch_release(queue); 

    /* Request BGRA frames so they can be fed straight into Quartz. */
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey; 
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 
    [captureOutput setVideoSettings:videoSettings]; 


    /* A low session preset keeps four simultaneous views cheap. */
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease]; 
    [self.captureSession setSessionPreset:AVCaptureSessionPresetLow]; 

    [self.captureSession addInput:captureInput]; 
    [self.captureSession addOutput:captureOutput]; 

    [self.captureSession startRunning]; 

    /* Four CALayers arranged in a 2x2 grid; every frame is pushed to all
       of them. The quarter-turn rotation compensates for the camera's
       native landscape orientation. */
    self.customLayer = [CALayer layer]; 
    self.customLayer.frame = CGRectMake(-20, 25, 200, 150); 
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1); 
    [self.view.layer insertSublayer:self.customLayer atIndex:4]; 

    self.customLayer1 = [CALayer layer]; 
    self.customLayer1.frame = CGRectMake(140, 25, 200, 150); 
    self.customLayer1.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1); 
    [self.view.layer addSublayer:self.customLayer1]; 

    self.customLayer2 = [CALayer layer]; 
    self.customLayer2.frame = CGRectMake(-20, 235, 200, 150); 
    self.customLayer2.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1); 
    [self.view.layer addSublayer:self.customLayer2]; 

    self.customLayer3 = [CALayer layer]; 
    self.customLayer3.frame = CGRectMake(140, 235, 200, 150); 
    self.customLayer3.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1); 
    [self.view.layer addSublayer:self.customLayer3]; 
} 



#pragma mark - 
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate 
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
     fromConnection:(AVCaptureConnection *)connection 
{ 


    /* Each callback arrives on the capture queue, so it needs its own
       autorelease pool under manual reference counting. */
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; 

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    /*Lock the image buffer*/ 
    CVPixelBufferLockBaseAddress(imageBuffer,0); 
    /*Get information about the image*/ 
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 


    /* Create a CGImage from the BGRA pixel data; the bitmap flags match
       the kCVPixelFormatType_32BGRA format requested above. */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    CGImageRef newImage2 = CGBitmapContextCreateImage(newContext); 

    /* Release the context and color space now that the image exists. */
    CGContextRelease(newContext); 
    CGColorSpaceRelease(colorSpace); 

    /* CALayer.contents accepts a CGImageRef; push the same frame to all
       four layers on the main thread. waitUntilDone:YES keeps newImage2
       alive until every layer has taken it. */
    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage2 waitUntilDone:YES]; 
    [self.customLayer1 performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage2 waitUntilDone:YES]; 
    [self.customLayer2 performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage2 waitUntilDone:YES]; 
    [self.customLayer3 performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage2 waitUntilDone:YES]; 


    /* Release the CGImage now that every layer has displayed it. */
    CGImageRelease(newImage2); 

    /* Unlock the pixel buffer. */
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 

    [pool drain]; 

} 

This works very well.
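One thing the snippet leaves out is teardown: the session should be stopped and the delegate detached when the view goes away. A minimal sketch under manual reference counting (stopCapture is my name, not part of the original answer):

- (void)stopCapture { 
    /* Stop frame delivery and detach the delegate before releasing anything. */
    [self.captureSession stopRunning]; 
    for (AVCaptureOutput *output in self.captureSession.outputs) { 
        if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) { 
            [(AVCaptureVideoDataOutput *)output setSampleBufferDelegate:nil queue:NULL]; 
        } 
    } 
    self.captureSession = nil; /* Releases the retained session. */
} 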

http://crayoncoding.blogspot.com/2011/04/iphone-4-camera-views-at-once.html

See the link above for the detailed code.
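Regarding the comment above about applying a different effect to each view, as Photo Booth does: one cheap approach is to derive a processed copy of each frame with Quartz before handing it to a layer. A purely illustrative sketch (the helper name and the multiply-tint technique are my own, not from the original answer):

/* Hypothetical helper: composite a translucent tint over a camera frame,
   a cheap per-view "effect" in the Photo Booth spirit. The caller owns
   the returned image and must CGImageRelease() it. */
static CGImageRef CreateTintedImage(CGImageRef source, CGColorRef tint) { 
    size_t width = CGImageGetWidth(source); 
    size_t height = CGImageGetHeight(source); 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef ctx = CGBitmapContextCreate(NULL, width, height, 8, 0, 
                                             colorSpace, kCGImageAlphaPremultipliedFirst); 
    CGColorSpaceRelease(colorSpace); 
    /* Draw the frame, then multiply the tint color over it. */
    CGContextDrawImage(ctx, CGRectMake(0, 0, width, height), source); 
    CGContextSetBlendMode(ctx, kCGBlendModeMultiply); 
    CGContextSetFillColorWithColor(ctx, tint); 
    CGContextFillRect(ctx, CGRectMake(0, 0, width, height)); 
    CGImageRef result = CGBitmapContextCreateImage(ctx); 
    CGContextRelease(ctx); 
    return result; 
} 

In the frame callback, each layer would then receive its own tinted copy instead of sharing newImage2, at the cost of three extra redraws per frame (and each copy must be released once its layer has taken it).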