
Real-Time Face Detection with GPUImage

zhaocarbon

Recently, while looking into implementing biometric liveness detection in Objective-C, I found that the SDK already ships with the relevant classes, so I did some research and put together an implementation.

During the implementation I ran into one rather nasty pitfall: the GPUImage framework captures video in YUV format, and YUV frames cannot be handed to the system's Objective-C face-detection classes for real-time recognition.
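To make the pitfall concrete, here is a minimal sketch of the obvious first attempt: hang a handler off GPUImageVideoCamera's sample-buffer delegate and run Core Image's CIDetector on every frame. The GPUImageVideoCameraDelegate protocol and its willOutputSampleBuffer: callback are real GPUImage API; the handler class name and the detector options are my own illustration. With the camera left in its default YUV capture format, this is exactly the path that breaks down.

#import <GPUImage/GPUImage.h>
#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>

// Hypothetical handler (illustration only): receives every captured frame from
// GPUImageVideoCamera and runs Core Image face detection on it.
@interface FaceDetectingFrameHandler : NSObject <GPUImageVideoCameraDelegate>
@property (nonatomic, strong) CIDetector *faceDetector;
@end

@implementation FaceDetectingFrameHandler

- (instancetype)init
{
    if (self = [super init]) {
        // Low accuracy keeps the per-frame cost small enough for real-time use.
        _faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                           context:nil
                                           options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
    }
    return self;
}

// GPUImageVideoCameraDelegate callback, invoked for each captured sample buffer.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }

    // With the default biplanar YUV pixel format, this conversion/detection step is
    // where things go wrong; with BGRA frames it behaves as expected.
    CIImage *frame = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    NSArray *faces = [self.faceDetector featuresInImage:frame];
    if (faces.count > 0) {
        NSLog(@"Detected %lu face(s)", (unsigned long)faces.count);
    }
}

@end

Wiring it up is just a matter of assigning the handler to the camera's delegate property before starting capture (videoCamera.delegate = handler;).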

Let's start by dissecting the implementation of GPUImageVideoCamera:


@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
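Before opening up the initializer, here is how the class is normally constructed and attached to a preview view. The view-controller scaffolding is my own illustration; the GPUImageVideoCamera and GPUImageView calls are standard GPUImage API.

#import <UIKit/UIKit.h>
#import <GPUImage/GPUImage.h>

// Hypothetical view controller (illustration only) showing typical construction of
// GPUImageVideoCamera with the initializer declared above.
@interface CameraPreviewViewController : UIViewController
@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;
@property (nonatomic, strong) GPUImageView *previewView;
@end

@implementation CameraPreviewViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Render target for the camera's output.
    self.previewView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.previewView];

    // Front camera at 640x480 is usually enough for face detection.
    self.videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                           cameraPosition:AVCaptureDevicePositionFront];
    self.videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    self.videoCamera.horizontallyMirrorFrontFacingCamera = YES;

    [self.videoCamera addTarget:self.previewView];
    [self.videoCamera startCameraCapture];
}

@end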

As you can see, the class provides a single initializer; the code inside that initializer is as follows:

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);

    frameRenderingSemaphore = dispatch_semaphore_create(1);

    _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
    internalRotation = kGPUImageNoRotation;
    captureAsYUV = YES;
    _preferredConversion = kColorConversion709;

    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == cameraPosition)
        {
            _inputCamera = device;
        }
    }

    if (!_inputCamera) {
        return nil;
    }

    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];

    [_captureSession beginConfiguration];

    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }

    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];

//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }

        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = YES;
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = NO;
        }
    }
    else
    {
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }

    runSynchronouslyOnVideoProcessingQueue(^{

        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
//            if ([GPUImageContext deviceSupportsRedTextures])
//            {
//                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
//            }
//            else
//            {
            if (isFullYUVRange)
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
            }
            else
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
            }
//            }

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });

    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    // ... remainder of -initWithSessionPreset:cameraPosition: not shown
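The line to notice in the initializer is captureAsYUV = YES;: on any device that supports fast texture upload, the AVCaptureVideoDataOutput is configured with a biplanar YUV pixel format instead of 32-bit BGRA, which is the root of the incompatibility described at the top. One possible workaround is a small subclass that switches the output back to BGRA after the superclass initializer has run. This is only a sketch, and it assumes that captureAsYUV, videoOutput, and _captureSession are protected instance variables declared in GPUImageVideoCamera.h and therefore visible to a subclass.

#import <GPUImage/GPUImage.h>

// Sketch of a workaround (not part of GPUImage): force 32BGRA capture so the
// sample buffers delivered to willOutputSampleBuffer: can be consumed directly by
// Core Image face detection. Assumes captureAsYUV, videoOutput, and _captureSession
// are protected ivars exposed by GPUImageVideoCamera.h.
@interface BGRAVideoCamera : GPUImageVideoCamera
@end

@implementation BGRAVideoCamera

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition
{
    if (!(self = [super initWithSessionPreset:sessionPreset cameraPosition:cameraPosition])) {
        return nil;
    }

    // Reopen the session configuration and replace the YUV pixel format chosen by
    // the superclass initializer with 32-bit BGRA.
    [_captureSession beginConfiguration];
    captureAsYUV = NO;
    [videoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey :
                                        @(kCVPixelFormatType_32BGRA)}];
    [_captureSession commitConfiguration];

    return self;
}

@end

With BGRA frames coming through, the frame handler sketched earlier can pass each buffer straight to CIDetector, and the GPUImage filter chain should keep working because the camera class also contains a BGRA processing path alongside the YUV one shown above.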
