AVCaptureSession memory issues
0 votes
/ June 30, 2019

I'm fairly new to the Objective-C world, and this problem seems to be much harder than a quick Google search. I'm using Ionic/Cordova and built my own plugin, which has been working 100% on Android for a while now. I've been tasked with developing the iOS counterpart of the plugin.

The plugin consists of a modified version of the Cordova Camera Preview plugin and the Firebase MLKit demo. I use it to run a camera preview in the background for real-time text recognition with the Firebase MLKit TextRecognizer.

My current problem seems to be the AVCaptureSession, because the app keeps crashing before the capture session is even running. I get "Terminated due to memory issue" as soon as I try to start the camera session.

I've tried several fixes from around Google and Stack Overflow, but none of them seem to work for me. The device I'm currently debugging on is an iPhone 7 Plus running iOS 12.3.1.
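
For reference, here is my mental model of the bare-minimum session setup (just a sketch with placeholder names, not code from the plugin); as far as I can tell, the plugin code below does essentially this, plus the MLKit pieces:

// Minimal sketch of an AVCaptureSession: one back-camera input, one video
// data output, startRunning called off the main thread. All identifiers
// here are placeholders, not plugin code.
dispatch_queue_t sessionQueue = dispatch_queue_create("sketch.session", DISPATCH_QUEUE_SERIAL);

AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;

// Deprecated in favour of AVCaptureDeviceDiscoverySession, but fine for a sketch.
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input != nil && [session canAddInput:input]) {
    [session addInput:input];
}

AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.alwaysDiscardsLateVideoFrames = YES; // drop late frames instead of queueing them
if ([session canAddOutput:output]) {
    [session addOutput:output];
}
[session commitConfiguration];

dispatch_async(sessionQueue, ^{
    [session startRunning]; // blocking call; keep it off the main thread
});

If my real code below deviates from this in a way that explains the memory warnings, that is exactly what I'm trying to find.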

Below are the main code files that start the AVCaptureSession and read its output:

The .h file:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface CameraViewController : UIViewController
- (void)setDeviceSize:(CGFloat)width height:(CGFloat)height x:(CGFloat)x y:(CGFloat)y;
- (void)startCameraSession:(void(^)(BOOL started))completion;
- (void)startCameraCaptureSession;
@end

NS_ASSUME_NONNULL_END

The .m file:

#import "CameraViewController.h"
#import "UIUtilities.h"

@import AVFoundation;
@import CoreVideo;

@import FirebaseMLVision;
@import FirebaseMLCommon;

NS_ASSUME_NONNULL_BEGIN

static NSString *const videoDataOutputQueueLabel = @"com.google.firebaseml.visiondetector.VideoDataOutputQueue";
static NSString *const sessionQueueLabel = @"com.google.firebaseml.visiondetector.SessionQueue";
static NSString *const noResultsMessage = @"No Results";

static const CGFloat FIRconstantScale = 1.0;

static BOOL isBusyProcessing = false;
static BOOL shouldScanAndProcess = false;

static BOOL isCameraOutputFinish = false;
static BOOL isCameraInputFinish = false;
static BOOL isCameraSetupFinish = false;

static CGFloat deviceX = (CGFloat)0;
static CGFloat deviceY = (CGFloat)0;
static CGFloat deviceHeight = (CGFloat)0;
static CGFloat deviceWidth = (CGFloat)0;

static int startupActionsCompleted = 0;
static dispatch_queue_t backgroundQueue;

@interface CameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, nonnull) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic) AVCaptureSession *captureSession;
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) FIRVision *vision;
@property (nonatomic) UIImageView *previewOverlayView;
@property (nonatomic) UIView *cameraView;
@property (nonatomic) CMSampleBufferRef lastFrame;

@end

@implementation CameraViewController

- (instancetype)init {
    self = [super init];
    if (self) {
        // Serial queue used only for firing the startup completion callback.
        backgroundQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}

- (void)setDeviceSize:(CGFloat)width height:(CGFloat)height x:(CGFloat)x y:(CGFloat)y {
    deviceWidth = width;
    deviceHeight = height;
    deviceX = x;
    deviceY = y;
}

- (void)startCameraSession:(void(^)(BOOL started))completion {
    dispatch_async(dispatch_get_main_queue(), ^{
        self->_cameraView = self.view;
        isCameraInputFinish = false;
        isCameraOutputFinish = false;
        isCameraSetupFinish = false;
        self->_captureSession = [[AVCaptureSession alloc] init];
        self->_sessionQueue = dispatch_queue_create(sessionQueueLabel.UTF8String, nil);
        self->_vision = [FIRVision vision];
        self->_previewOverlayView = [[UIImageView alloc] initWithFrame:CGRectMake(deviceX, deviceY, deviceWidth, deviceHeight)];

        // Each of the three setup steps below increments startupActionsCompleted;
        // the completion block fires once all three have run.
        startupActionsCompleted = 0;
        self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->_captureSession];
        [self setUpPreviewOverlayView:completion];
        [self setUpCaptureSessionOutput:completion];
        [self setUpCaptureSessionInput:completion];
    });
}
- (void)startCameraCaptureSession{
    [self startSession];
}

- (void)viewDidLoad {
  [super viewDidLoad];
}

- (void)viewDidAppear:(BOOL)animated {
  [super viewDidAppear:animated];
}

- (void)viewDidDisappear:(BOOL)animated {
  [super viewDidDisappear:animated];
  [self stopSession];
}

- (void)viewDidLayoutSubviews {
  [super viewDidLayoutSubviews];
  _previewLayer.frame = _cameraView.frame;
}

#pragma mark - Private

- (void)setUpCaptureSessionOutput:(void(^)(BOOL started))completion {
  dispatch_async(_sessionQueue, ^{
      [self->_captureSession beginConfiguration];
      self->_captureSession.sessionPreset = AVCaptureSessionPresetMedium;

      AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
      output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]};
      dispatch_queue_t outputQueue = dispatch_queue_create(videoDataOutputQueueLabel.UTF8String, nil);
      [output setSampleBufferDelegate:self queue:outputQueue];
      if ([self.captureSession canAddOutput:output]) {
          [self.captureSession addOutput:output];
      } else {
          NSLog(@"%@", @"Failed to add capture session output.");
      }
      // Commit even when adding the output fails, so the session is not left
      // in a half-configured state.
      [self.captureSession commitConfiguration];

      startupActionsCompleted++;
      isCameraOutputFinish = true;
      if (isCameraInputFinish) {
          isCameraSetupFinish = true;
      }
      if (startupActionsCompleted >= 3) {
          dispatch_async(backgroundQueue, ^{
              completion(true);
          });
      }
  });
}

- (void)setUpCaptureSessionInput:(void(^)(BOOL started))completion {
  dispatch_async(_sessionQueue, ^{
      AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionBack;
      AVCaptureDevice *device = [self captureDeviceForPosition:cameraPosition];
      if (device) {
          [self->_captureSession beginConfiguration];
          NSArray<AVCaptureInput *> *currentInputs = self.captureSession.inputs;
          for (AVCaptureInput *input in currentInputs) {
              [self.captureSession removeInput:input];
          }
          NSError *error;
          AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
          if (error) {
              NSLog(@"Failed to create capture device input: %@", error.localizedDescription);
              // Do not return early here: the configuration still has to be
              // committed and the startup counter incremented below, otherwise
              // the completion block would never fire.
          } else if ([self.captureSession canAddInput:input]) {
              [self.captureSession addInput:input];
          } else {
              NSLog(@"%@", @"Failed to add capture session input.");
          }
          [self.captureSession commitConfiguration];
      } else {
          NSLog(@"Failed to get capture device for camera position: %ld", (long)cameraPosition);
      }
      startupActionsCompleted++;
      isCameraInputFinish = true;
      if (isCameraOutputFinish) {
          isCameraSetupFinish = true;
      }
      if (startupActionsCompleted >= 3) {
          dispatch_async(backgroundQueue, ^{
              completion(true);
          });
      }
  });
}

- (void)startSession {
  dispatch_async(_sessionQueue, ^{
    [self->_captureSession startRunning];
  });
}

- (void)stopSession {
  dispatch_async(_sessionQueue, ^{
    [self->_captureSession stopRunning];
  });
}

- (void)setUpPreviewOverlayView:(void(^)(BOOL started))completion {
    [_cameraView addSubview:_previewOverlayView];
    // Note: this runs on the main queue while the other two setup steps run
    // on _sessionQueue, so the startupActionsCompleted counter is racy.
    startupActionsCompleted++;
    if (startupActionsCompleted >= 3) {
        dispatch_async(backgroundQueue, ^{
            completion(true);
        });
    }
}

- (void)recognizeTextOnDeviceInImage:(FIRVisionImage *)image width:(CGFloat)width height:(CGFloat)height {
  FIRVisionTextRecognizer *textRecognizer = [_vision onDeviceTextRecognizer];
  // The dispatch group makes this call synchronous, so only one frame at a
  // time is processed on _sessionQueue.
  dispatch_group_t group = dispatch_group_create();
  dispatch_group_enter(group);
  [textRecognizer processImage:image completion:^(FIRVisionText * _Nullable text, NSError * _Nullable error) {
    isBusyProcessing = false;
    if (text == nil) {
      NSLog(@"On-Device text recognizer error: %@", error ? error.localizedDescription : noResultsMessage);
      dispatch_group_leave(group);
      return;
    }
    for (FIRVisionTextBlock *block in text.blocks) {
      NSArray<NSValue *> *blockPoints = [self convertedPointsFromPoints:block.cornerPoints width:width height:height];

      for (FIRVisionTextLine *line in block.lines) {
        NSArray<NSValue *> *linePoints = [self convertedPointsFromPoints:line.cornerPoints width:width height:height];

        for (FIRVisionTextElement *element in line.elements) {
          CGRect normalizedRect = CGRectMake(element.frame.origin.x / width, element.frame.origin.y / height, element.frame.size.width / width, element.frame.size.height / height);
          CGRect convertedRect = [self->_previewLayer rectForMetadataOutputRectOfInterest:normalizedRect];
          UILabel *label = [[UILabel alloc] initWithFrame:convertedRect];
          label.text = element.text;
          label.adjustsFontSizeToFitWidth = YES;
          // Note: the label and the converted corner points are currently
          // never added to any view.
        }
      }
    }
    dispatch_group_leave(group);
  }];
  dispatch_group_wait(group, DISPATCH_TIME_FOREVER);
}

- (AVCaptureDevice *)captureDeviceForPosition:(AVCaptureDevicePosition)position  {
  if (@available(iOS 10, *)) {
    AVCaptureDeviceDiscoverySession *discoverySession =
      [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                             mediaType:AVMediaTypeVideo
                                                             position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *device in discoverySession.devices) {
      if (device.position == position) {
        return device;
      }
    }
  }
  return nil;
}

- (void)updatePreviewOverlayView {
    // Called from the video output queue; dispatch_sync blocks that queue
    // until the main queue has rendered the frame, which also keeps the
    // unretained _lastFrame valid for the duration of this call.
    dispatch_sync(dispatch_get_main_queue(), ^{
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(_lastFrame);
        if (imageBuffer == nil) {
            return;
        }
        CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
        CIContext *context = [[CIContext alloc] initWithOptions:nil];
        CGImageRef cgImage = [context createCGImage:ciImage fromRect:ciImage.extent];
        if (cgImage == nil) {
            return;
        }
        UIImage *rotatedImage = [UIImage imageWithCGImage:cgImage scale:FIRconstantScale orientation:UIImageOrientationRight];
        _previewOverlayView.image = rotatedImage;
        CGImageRelease(cgImage);
    });
}

- (NSArray <NSValue *>*)convertedPointsFromPoints:(NSArray<NSValue *> *)points
                                            width:(CGFloat)width
                                           height:(CGFloat)height {
  NSMutableArray *result = [NSMutableArray arrayWithCapacity:points.count];
  for (NSValue *point in points) {
    CGPoint cgPointValue = point.CGPointValue;
    CGPoint normalizedPoint = CGPointMake(cgPointValue.x / width, cgPointValue.y / height);
    CGPoint cgPoint = [_previewLayer pointForCaptureDevicePointOfInterest:normalizedPoint];
    [result addObject: [NSValue valueWithCGPoint:cgPoint]];
  }
  return result;
}

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (isCameraSetupFinish) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (imageBuffer) {
            // sampleBuffer is only referenced for the synchronous
            // updatePreviewOverlayView call below; it is never retained.
            _lastFrame = sampleBuffer;
            [self updatePreviewOverlayView];

            if (!isBusyProcessing && shouldScanAndProcess) {
                isBusyProcessing = true;
                FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithBuffer:sampleBuffer];
                FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
                UIImageOrientation orientation = [UIUtilities imageOrientationFromDevicePosition:AVCaptureDevicePositionBack];

                FIRVisionDetectorImageOrientation visionOrientation = [UIUtilities visionImageOrientationFromImageOrientation:orientation];
                metadata.orientation = visionOrientation;
                visionImage.metadata = metadata;
                CGFloat imageWidth = CVPixelBufferGetWidth(imageBuffer);
                CGFloat imageHeight = CVPixelBufferGetHeight(imageBuffer);
                dispatch_async(_sessionQueue, ^{
                    [self recognizeTextOnDeviceInImage:visionImage width:imageWidth height:imageHeight];
                });
            }
        } else {
            NSLog(@"%@", @"Failed to get image buffer from sample buffer.");
        }

        // Had to remove these as they caused bad memory errors...
        // (That makes sense: AVFoundation owns the buffers it passes to this
        // delegate, so releasing them here is an over-release.)
        //CFRelease(sampleBuffer);
        //CFRelease(imageBuffer);
    }
}

@end

NS_ASSUME_NONNULL_END

The calling code:

- (void)startScanner:(CDVInvokedUrlCommand*)command
{
    CDVPluginResult *pluginResult;

    if (self.cameraViewController != nil) {
        pluginResult = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:@"Scanner Started"];
        [self.commandDelegate sendPluginResult:pluginResult callbackId:command.callbackId];
        return;
    }

    CGFloat x = self.webView.frame.origin.x;
    CGFloat y = self.webView.frame.origin.y;
    CGFloat width = (CGFloat)[command.arguments[1] floatValue];
    CGFloat height = (CGFloat)[command.arguments[0] floatValue];

    CGRect frameRect = CGRectMake(x, y, width, height);
    self.cameraViewController = [[CameraViewController alloc] init];

    [self.cameraViewController setDeviceSize:width height:height x:x y:y];
    self.cameraViewController.view.frame = frameRect;

    [self.viewController addChildViewController:self.cameraViewController];

    self.webView.opaque = NO;
    self.webView.backgroundColor = [UIColor clearColor];

    [self.webView.superview addSubview:self.cameraViewController.view];
    [self.webView.superview bringSubviewToFront:self.webView];

    // Start Camera and wait for completion
    [self.cameraViewController startCameraSession:^(BOOL started){
        // Memory issues start before I even call the "startRunning" method
        [self.cameraViewController startCameraCaptureSession];
        [self.commandDelegate sendPluginResult:[CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:@"Scanner Started"] callbackId:command.callbackId];
    }];
}

I have commented out almost everything in the .m file (setUpPreviewOverlayView, setUpCaptureSessionOutput, setUpCaptureSessionInput) in turn, which makes me think I must simply be driving AVCaptureSession the wrong way somewhere. Can anyone point me in the right direction?
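
Separately, even though the crash happens before any frames should arrive, I'm also unsure about keeping the sample buffer in _lastFrame without retaining it. My understanding (an untested guess, not verified against the MLKit demo) is that if the preview update ever became asynchronous, the buffer would have to be retained and released explicitly, roughly like this:

// Sketch of explicit sample-buffer ownership inside captureOutput:... —
// my assumption, not verified: retain the incoming frame and release the
// one it replaces, since AVFoundation reclaims delegate buffers otherwise.
CFRetain(sampleBuffer);
if (self->_lastFrame != NULL) {
    CFRelease(self->_lastFrame);
}
self->_lastFrame = sampleBuffer;

Could that kind of buffer handling contribute to the memory pressure, or is the problem in how I configure and start the session itself?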

Any help would be greatly appreciated :)
