【问题标题】:iOS rotate, filter video stream(在 iOS 中旋转、过滤视频直播流)
【发布时间】:2017-02-26 01:13:22
【问题描述】:

您好,我正在通过 GPUImage 在视频直播中旋转帧并应用图像过滤器。该任务花费的时间比预期的要多,导致 iPhone 过热。谁能帮我优化一下代码?以下是我使用的代码:

// Per-frame capture callback: rotate the frame, run the selected GPUImage
// filter, repackage as a CMSampleBuffer, and hand it to the Red5Pro encoder.
// Fixed: the original unconditionally CFRelease'd the filter output and the
// rebuilt sample buffer, which crashes (CFRelease(NULL)) when either step fails.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    // Drop frames whose sample buffer is no longer valid.
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }

    // Convert the camera frame to a CGImage (returned at +1; released below).
    CGImageRef cgImageFromBuffer = [self cgImageFromSampleBuffer:sampleBuffer];
    if (cgImageFromBuffer == NULL) {
        return;
    }

    // Rotate 90 degrees into the orientation the stream expects.
    UIImage *rotatedPlainImage = [UIUtils rotateImage:[UIImage imageWithCGImage:cgImageFromBuffer] byDegree:90];
    if (rotatedPlainImage == nil) {
        CFRelease(cgImageFromBuffer);
        return;
    }

    // Apply the selected GPUImage filter ("new…" returns a +1 CGImage, may be NULL).
    CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
    if (filteredCGImage == NULL) {
        CFRelease(cgImageFromBuffer);
        return;
    }

    // Wrap the filtered image back into a CMSampleBuffer with the original timing.
    CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
    if (outputBuffer != NULL) {
        // Pass to the custom Red5Pro encoder for the live stream.
        [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
        CFRelease(outputBuffer);
    }

    CFRelease(filteredCGImage);
    CFRelease(cgImageFromBuffer);
}


- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer // Create a CGImageRef from sample buffer data
{
    // Caller owns the returned CGImage and must CFRelease it.
    // Assumes a non-planar 32-bit BGRA pixel buffer — TODO confirm against
    // the capture session's video settings.
    // Fixed: guard against a missing image buffer and a failed bitmap-context
    // creation; the original called CGBitmapContextCreateImage/CGContextRelease
    // on a possibly-NULL context.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(imageBuffer,0);        // Lock while CG reads the pixels

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);   // Get information of the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // BGRA little-endian, alpha first — matches kCVPixelFormatType_32BGRA.
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = NULL;
    if (newContext != NULL) {
        newImage = CGBitmapContextCreateImage(newContext);
        CGContextRelease(newContext);
    }

    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    // Note: imageBuffer is owned by the sample buffer — do NOT CVBufferRelease it.

    return newImage;
}

- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    // Render the CGImage into a fresh BGRA pixel buffer and wrap it in a
    // CMSampleBuffer that carries the original frame's timing.
    // Caller owns the returned buffer (CFRelease); returns NULL on failure.
    CIImage *nm = [CIImage imageWithCGImage:imageRef];

    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)nm.extent.size.width, (size_t)nm.extent.size.height, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    if (status != kCVReturnSuccess || pixelBuffer == NULL) {
        return NULL;   // fixed: pixelBuffer was used uninitialized on failure
    }

    // Fixed: the original built a new CIContext on EVERY frame. CIContext
    // creation is expensive and is the main source of the CPU load /
    // overheating reported in the question — create it once and reuse it.
    static CIContext *ciContext = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        ciContext = [CIContext contextWithOptions:nil];
    });

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    [ciContext render:nm toCVPixelBuffer:pixelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    // Propagate the source frame's timestamps so A/V sync is preserved.
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf = NULL;   // fixed: initialize so a failed create returns NULL, not garbage
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
    CVPixelBufferRelease(pixelBuffer);
    if (videoInfo != NULL) {
        CFRelease(videoInfo);
    }
    return oBuf;
}

【问题讨论】:

    标签: ios video-streaming avfoundation gpuimage red5pro


    【解决方案1】:
    // Body of a rotate helper (signature not shown in the answer): rotates the
    // incoming frame via a CIImage orientation transform, renders it through a
    // long-lived _ciContext ivar, and returns a new CMSampleBuffer (+1) with
    // the original timing. NSLog timing probes are kept — they are the point
    // of this answer. Fixed: videoInfo was leaked, and the sample buffer was
    // created with kCFAllocatorMalloc instead of the default allocator used
    // everywhere else.
    NSLog(@"start rotate");
    CFAbsoluteTime t0 = CFAbsoluteTimeGetCurrent();
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciimage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CFAbsoluteTime t1 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur to ciimage: %@", @(t1-t0));

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // The rotation is just an orientation transform on the CIImage; no pixels
    // are copied until the render below.
    CIImage *newImage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationRight];
    CFAbsoluteTime t2 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur rotate ciimage: %@", @(t2-t1));

    CVPixelBufferRef newPixcelBuffer = nil;
    size_t width                        = CVPixelBufferGetWidth(pixelBuffer);
    size_t height                       = CVPixelBufferGetHeight(pixelBuffer);

    // Width/height swap because the frame is rotated 90 degrees.
    CVPixelBufferCreate(kCFAllocatorDefault, height, width, kCVPixelFormatType_32BGRA, nil, &newPixcelBuffer);
    CFAbsoluteTime t3 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur alloc pixel: %@", @(t3-t2));
    // _ciContext must be a long-lived ivar — creating a CIContext per frame
    // is prohibitively expensive.
    [_ciContext render:newImage toCVPixelBuffer:newPixcelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CFAbsoluteTime t4 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur render pixel: %@", @(t4-t3));

    // Copy the source frame's timing so downstream A/V sync is preserved.
    CMSampleTimingInfo sampleTimingInfo = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };

    CMVideoFormatDescriptionRef videoInfo = nil;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, newPixcelBuffer, &videoInfo);

    CMSampleBufferRef newSampleBuffer = nil;
    // fixed: use kCFAllocatorDefault (was kCFAllocatorMalloc) for consistency
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, newPixcelBuffer, true, nil, nil, videoInfo, &sampleTimingInfo, &newSampleBuffer);

    CFAbsoluteTime t5 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur create CMSample: %@", @(t5-t4));
    // release — fixed: videoInfo was never released in the original snippet
    if (videoInfo != nil) {
        CFRelease(videoInfo);
    }
    CVPixelBufferRelease(newPixcelBuffer);
    CFAbsoluteTime t6 = CFAbsoluteTimeGetCurrent();
    NSLog(@"dur end rotate: %@", @(t6-t0));
    return newSampleBuffer;

    【讨论】:

    • 请解释您的代码行,以便其他用户了解其功能。谢谢!
    【解决方案2】:

    我使用了 OpenGL 2.0 和 Accelerate Framework

    加速框架旋转CMSampleBuffer

    现在没有过滤器的时间是 3 - 8 毫秒

    使用过滤器需要 7-21 毫秒

    OpenGL 让 CI 图像在 CVPixelBuffer 上快速渲染

    @implementation ColorsVideoSource{
        CIContext *coreImageContext;
    }
    
    // Designated initializer: builds the GPU-backed Core Image context once,
    // up front, so per-frame -render:toCVPixelBuffer: work stays off the CPU.
    // Fixed: the original allocated a throwaway GLKView only to read back the
    // very EAGLContext it had just been handed — use the context directly.
    - (instancetype)init{
        self = [super init];
        if (self != nil) {
            EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
            coreImageContext = [CIContext contextWithEAGLContext:glContext];
        }

        return self;
    }
    
    // Per-frame callback: rotate with vImage, optionally filter with GPUImage,
    // then encode. Fixed: `rotateBuffer` was leaked on the early return when
    // CGImage conversion failed, and several CFRelease calls could receive
    // NULL (a crash, since CFRelease(NULL) is not allowed).
    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer{
        if (!CMSampleBufferIsValid(sampleBuffer)) {
            return;
        }
        // vImage-based 90° rotation; returns a +1 pixel buffer (NULL on failure).
        CVPixelBufferRef rotateBuffer = [self correctBufferOrientation:sampleBuffer];
        if (rotateBuffer == NULL) {
            return;
        }
        CGImageRef cgImageFromBuffer = [self cgImageFromImageBuffer:rotateBuffer];
        if (cgImageFromBuffer == NULL) {
            CFRelease(rotateBuffer);   // fixed: was leaked on this early return
            return;
        }

        UIImage *rotatedPlainImage = [UIImage imageWithCGImage:cgImageFromBuffer];

        if (rotatedPlainImage == nil) {
            CFRelease(rotateBuffer);
            CFRelease(cgImageFromBuffer);
            return;
        }

        if (_currentFilterType == SWPublisherFilterNone) {
            // Unfiltered path: wrap the rotated pixel buffer directly.
            if (_needPreviewImage) {
                _previewImage = rotatedPlainImage;
            }
            // Keep the source frame's timing so A/V stays in sync.
            CMSampleTimingInfo sampleTime = {
                .duration = CMSampleBufferGetDuration(sampleBuffer),
                .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
            };
            CMVideoFormatDescriptionRef videoInfo = NULL;
            CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, &videoInfo);
            CMSampleBufferRef oBuf = NULL;
            CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
            if (videoInfo != NULL) {
                CFRelease(videoInfo);
            }

            if (!self.pauseEncoding && oBuf != NULL) {
                @try {
                    [self.encoder encodeFrame:oBuf ofType:r5_media_type_video_custom];
                } @catch (NSException *exception) {
                    NSLog(@"Encoder error: %@", exception);
                }
            }
            if (oBuf != NULL) {
                CFRelease(oBuf);
            }
        }
        else {
            // Filtered path: run the GPUImage filter ("new…" returns +1), repackage.
            CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
            if (filteredCGImage == NULL) {
                CFRelease(rotateBuffer);
                CFRelease(cgImageFromBuffer);
                return;
            }
            if (_needPreviewImage) {
                _previewImage = [UIImage imageWithCGImage:filteredCGImage];
            }
            CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
            if (!self.pauseEncoding && outputBuffer != NULL) {
                @try {
                    [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
                } @catch (NSException *exception) {
                    NSLog(@"Encoder error: %@", exception);
                }
            }
            if (outputBuffer != NULL) {
                CFRelease(outputBuffer);
            }
            CFRelease(filteredCGImage);
        }
        CFRelease(rotateBuffer);
        CFRelease(cgImageFromBuffer);
    }
    
    #pragma mark - Methods Refactored GPUImage - Devanshu
    
    // Rotates the frame with the Accelerate framework (vImage constant 3 =
    // 270° counterclockwise, i.e. 90° clockwise). Returns a +1 CVPixelBuffer
    // whose malloc'd backing store is freed by freePixelBufferDataAfterRelease,
    // or NULL on allocation failure.
    // Fixed: the destination was sized from the SOURCE stride
    // (bytesPerRow * height) instead of the destination's own
    // bytesPerRowOut * width, and malloc / CVPixelBufferCreateWithBytes
    // failures were unhandled (the latter leaked dstBuff).
    - (CVPixelBufferRef)correctBufferOrientation:(CMSampleBufferRef)sampleBuffer
    {
        CVImageBufferRef imageBuffer        = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        size_t bytesPerRow                  = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width                        = CVPixelBufferGetWidth(imageBuffer);
        size_t height                       = CVPixelBufferGetHeight(imageBuffer);
        // Destination dimensions swap under a 90° turn; its rows are tightly
        // packed: 4 bytes per BGRA pixel * height source rows per output row.
        size_t bytesPerRowOut               = 4 * height * sizeof(unsigned char);
        size_t dstSize                      = bytesPerRowOut * width;

        void *srcBuff                       = CVPixelBufferGetBaseAddress(imageBuffer);

        /* rotationConstant:
         *  0 -- rotate 0 degrees (simply copy the data from src to dest)
         *  1 -- rotate 90 degrees counterclockwise
         *  2 -- rotate 180 degress
         *  3 -- rotate 270 degrees counterclockwise
         */
        uint8_t rotationConstant            = 3;

        unsigned char *dstBuff              = (unsigned char *)malloc(dstSize);
        if (dstBuff == NULL) {
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return NULL;
        }

        vImage_Buffer inbuff                = {srcBuff, height, width, bytesPerRow};
        vImage_Buffer outbuff               = {dstBuff, width, height, bytesPerRowOut};

        uint8_t bgColor[4]                  = {0, 0, 0, 0};

        // vImageRotate90_ARGB8888 works on any 4-channel 8-bit interleaved
        // format (BGRA included) — rotation never mixes channels.
        vImage_Error err                    = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0);
        if (err != kvImageNoError) NSLog(@"%ld", err);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        CVPixelBufferRef rotatedBuffer      = NULL;
        CVReturn status = CVPixelBufferCreateWithBytes(NULL,
                                     height,
                                     width,
                                     kCVPixelFormatType_32BGRA,
                                     outbuff.data,
                                     bytesPerRowOut,
                                     freePixelBufferDataAfterRelease,
                                     NULL,
                                     NULL,
                                     &rotatedBuffer);
        if (status != kCVReturnSuccess) {
            // The release callback only fires for a successfully created
            // buffer — free the backing store ourselves on failure.
            free(dstBuff);
            return NULL;
        }

        return rotatedBuffer;
    }
    
    // CVPixelBufferReleaseBytesCallback handed to CVPixelBufferCreateWithBytes
    // in correctBufferOrientation:. CoreVideo invokes it when the wrapping
    // pixel buffer's retain count drops to zero; baseAddress is the malloc'd
    // block that backs the rotated pixels.
    void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress)
    {
        // Free the memory we malloced for the vImage rotation
        free((void *)baseAddress);
    }
    
    
    - (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer // Create a CGImageRef from sample buffer data
    {
        // Thin convenience wrapper: pull the pixel buffer out of the sample
        // buffer and delegate the actual conversion.
        return [self cgImageFromImageBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];
    }
    
    - (CGImageRef)cgImageFromImageBuffer:(CVImageBufferRef) imageBuffer // Create a CGImageRef from sample buffer data
    {
        // Caller owns the returned CGImage and must CFRelease it. Assumes a
        // non-planar 32-bit BGRA pixel buffer — TODO confirm capture settings.
        // Fixed: guard against a NULL buffer and a failed bitmap-context
        // creation; the original called CGBitmapContextCreateImage and
        // CGContextRelease on a possibly-NULL context.
        if (imageBuffer == NULL) {
            return NULL;
        }
        CVPixelBufferLockBaseAddress(imageBuffer,0);        // Lock the image buffer

        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);   // Get information of the image
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        // BGRA little-endian, alpha first — matches kCVPixelFormatType_32BGRA.
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = NULL;
        if (newContext != NULL) {
            newImage = CGBitmapContextCreateImage(newContext);
            CGContextRelease(newContext);
        }

        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer,0);

        return newImage;
    }
    
    - (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer{
        // Render the filtered CGImage into a fresh BGRA pixel buffer through
        // the long-lived GPU-backed coreImageContext ivar, then wrap it in a
        // CMSampleBuffer carrying the original frame's timing.
        // Caller owns the result (CFRelease); returns NULL on failure.
        CIImage *theCoreImage = [CIImage imageWithCGImage:imageRef];

        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)theCoreImage.extent.size.width, (size_t)theCoreImage.extent.size.height, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
        if (status != kCVReturnSuccess || pixelBuffer == NULL) {
            return NULL;   // fixed: pixelBuffer was used uninitialized on failure
        }
        CVPixelBufferLockBaseAddress( pixelBuffer, 0 );

        [coreImageContext render:theCoreImage toCVPixelBuffer:pixelBuffer];

        CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
        // Keep the source frame's timestamps so A/V sync is preserved.
        CMSampleTimingInfo sampleTime = {
            .duration = CMSampleBufferGetDuration(sampleBuffer),
            .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
            .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        };
        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
        CMSampleBufferRef oBuf = NULL;   // fixed: initialize so a failed create returns NULL, not garbage
        CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
        CVPixelBufferRelease(pixelBuffer);
        if (videoInfo != NULL) {
            CFRelease(videoInfo);
        }
        return oBuf;
    }
    

    【讨论】:

    • 在录制视频时使用它不会造成内存问题吗?
    • 不会,我这边没有出现内存问题;你也可以自行释放任何不再需要的对象。
    猜你喜欢
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 2014-11-19
    • 1970-01-01
    • 2023-03-21
    • 2013-10-06
    • 2013-07-16
    • 2016-01-22
    相关资源
    最近更新 更多