Доступ к камере iPhone? - PullRequest
       25

Доступ к камере iPhone?

2 голосов
/ 24 августа 2010

Я хочу знать, как получить доступ к камере iPhone и работать с ней в режиме реального времени: например, просто отобразить изображение с камеры на экране.

Другой связанный вопрос:

Можно ли отобразить 4 вида с камеры одновременно, как в «Фотобудке» (Photo Booth) на Mac?

Ответы [ 2 ]

4 голосов
/ 09 апреля 2011

Вы можете сделать это, используя AVFoundation

// Builds the AVFoundation capture pipeline and a 2x2 grid of CALayers that
// all mirror the live camera feed (Photo Booth style). Frames are delivered
// to -captureOutput:didOutputSampleBuffer:fromConnection: on a private queue,
// which pushes each frame into all four layers.
- (void)initCapture {

    // Default video (back) camera. Surface the failure instead of silently
    // passing a nil input into the session (the original ignored the error).
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureInput =
        [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                              error:&inputError];
    if (captureInput == nil) {
        NSLog(@"%s: unable to create camera input: %@", __PRETTY_FUNCTION__, inputError);
        return;
    }

    // Autorelease so the session becomes the owner; the original alloc/init
    // with no matching release leaked the output under MRC.
    AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];

    // Drop frames we are too slow to process rather than queueing them up.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Sample buffers arrive on this private serial queue; the output retains
    // it, so our create is balanced by an immediate release.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Request BGRA so the delegate can feed the pixels straight into
    // CGBitmapContextCreate without any format conversion.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    [captureOutput setVideoSettings:[NSDictionary dictionaryWithObject:value forKey:key]];

    // Hand ownership to the retaining property, then drop our +1 so the
    // session is not over-retained (leak in the original).
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    self.captureSession = session;
    [session release];

    [self.captureSession setSessionPreset:AVCaptureSessionPresetLow];

    // Guard with canAdd...: adding an incompatible input/output raises.
    if ([self.captureSession canAddInput:captureInput]) {
        [self.captureSession addInput:captureInput];
    }
    if ([self.captureSession canAddOutput:captureOutput]) {
        [self.captureSession addOutput:captureOutput];
    }

    [self.captureSession startRunning];

    // 2x2 grid of preview tiles; every tile shows the same camera frame.
    self.customLayer  = [self previewLayerWithFrame:CGRectMake(5 - 25,   25,       200, 150)];
    self.customLayer1 = [self previewLayerWithFrame:CGRectMake(165 - 25, 25,       200, 150)];
    self.customLayer2 = [self previewLayerWithFrame:CGRectMake(5 - 25,   210 + 25, 200, 150)];
    self.customLayer3 = [self previewLayerWithFrame:CGRectMake(165 - 25, 210 + 25, 200, 150)];

    // The first tile keeps the original fixed z-index insertion; the rest
    // are appended on top in order.
    [self.view.layer insertSublayer:self.customLayer atIndex:4];
    [self.view.layer addSublayer:self.customLayer1];
    [self.view.layer addSublayer:self.customLayer2];
    [self.view.layer addSublayer:self.customLayer3];
}

// Creates one preview tile: a CALayer rotated 90 degrees, because the camera
// delivers landscape-oriented buffers while the tiles are laid out portrait.
- (CALayer *)previewLayerWithFrame:(CGRect)frame {
    CALayer *layer = [CALayer layer];
    layer.frame = frame;
    layer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    return layer;
}



#pragma mark -
#pragma mark AVCaptureSession delegate
// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the
// private serial "cameraQueue" for every captured video frame. Wraps the
// BGRA pixel buffer in a CGImage and hands the same image to all four
// preview layers on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection 
{ 

    // Manual pool: this runs on a GCD queue under MRC, so autoreleased
    // objects created per-frame would otherwise pile up between drains.
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    /* Lock the pixel buffer before touching its base address. */
    CVPixelBufferLockBaseAddress(imageBuffer,0); 
    /* Geometry and stride of the frame (BGRA per the output's videoSettings). */
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer);  

    /* Create a CGImageRef snapshot from the CVImageBufferRef bytes. */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    // kCGBitmapByteOrder32Little | premultiplied-first is the bitmap-info
    // combination that matches kCVPixelFormatType_32BGRA in memory.
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage2 = CGBitmapContextCreateImage(newContext); 
    /* Release the intermediate Core Graphics objects. */
    CGContextRelease(newContext); 
    CGColorSpaceRelease(colorSpace);

    // Layer contents must be set on the main thread. waitUntilDone:YES is
    // load-bearing here: it guarantees each layer has taken (and retained)
    // newImage2 as its contents before we release it below.
    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer1 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer2 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer3 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];


    //  UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];


    /* Balance CGBitmapContextCreateImage; the layers still retain the image. */
    CGImageRelease(newImage2);

    //  [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    /* Unlock the pixel buffer now that we are done reading its bytes. */
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    [pool drain];

} 

Этот код отлично работает.

http://crayoncoding.blogspot.com/2011/04/iphone-4-camera-views-at-once.html

см. Ссылку выше для подробного кода

0 голосов
/ 03 марта 2011

Вы можете попробовать использовать 4 экземпляра UIImagePickerController. Не уверен, что это сработает, но попробовать стоит.

Доступ к камере с помощью iPhone SDK

...