【问题标题】:Make Video From Image iOS从 iOS 图像制作视频
【发布时间】:2015-06-15 14:48:25
【问题描述】:

我从这个 SO 问题 Screen capture video in iOS programmatically 中找到了本教程 http://codethink.no-ip.org/wordpress/archives/673#comment-118063 如何做这样的事情,它对于 iOS 来说有点过时了,所以我更新了它,并且非常接近它的工作,但是把UIImages 一起现在还不能很好地工作。

这是我在 viewDidLoad 中调用方法的方式

[captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
                            [captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];

captureView 是一个连接到我的视图的 IBOutlet。

然后我有类 ScreenCapture.h & .m

这里是.h

@protocol ScreenCaptureViewDelegate <NSObject>
- (void) recordingFinished:(NSString*)outputPathOrNil;
@end

@interface ScreenCaptureView : UIView {
    //video writing
    AVAssetWriter *videoWriter;
    AVAssetWriterInput *videoWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *avAdaptor;

    //recording state
    BOOL _recording;
    NSDate* startedAt;
    void* bitmapData;
}

//for recording video
- (bool) startRecording;
- (void) stopRecording;

//for accessing the current screen and adjusting the capture rate, etc.
@property(retain) UIImage* currentScreen;
@property(assign) float frameRate;
@property(nonatomic, assign) id<ScreenCaptureViewDelegate> delegate;

@end

这是我的 .m

@interface ScreenCaptureView(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end

@implementation ScreenCaptureView

@synthesize currentScreen, frameRate, delegate;

- (void) initialize {
    // Initialization code
    self.clearsContextBeforeDrawing = YES;
    self.currentScreen = nil;
    self.frameRate = 10.0f;     //10 frames per seconds
    _recording = false;
    videoWriter = nil;
    videoWriterInput = nil;
    avAdaptor = nil;
    startedAt = nil;
    bitmapData = NULL;
}

- (id) initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id) init {
    self = [super init];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initialize];
    }
    return self;
}

- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    bitmapBytesPerRow   = (size.width * 4);
    bitmapByteCount     = (bitmapBytesPerRow * size.height);
    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (bitmapData != NULL) {
        free(bitmapData);
    }
    bitmapData = malloc( bitmapByteCount );
    if (bitmapData == NULL) {
        fprintf (stderr, "Memory not allocated!");
        return NULL;
    }

    context = CGBitmapContextCreate (bitmapData,
                                     size.width,
                                     size.height,
                                     8,      // bits per component
                                     bitmapBytesPerRow,
                                     colorSpace,
                                     (CGBitmapInfo) kCGImageAlphaNoneSkipFirst);

    CGContextSetAllowsAntialiasing(context,NO);
    if (context== NULL) {
        free (bitmapData);
        fprintf (stderr, "Context not created!");
        return NULL;
    }
    CGColorSpaceRelease( colorSpace );

    return context;
}

static int frameCount = 0;            //debugging
- (void) drawRect:(CGRect)rect {
    NSDate* start = [NSDate date];
    CGContextRef context = [self createBitmapContextOfSize:self.frame.size];

    //not sure why this is necessary...image renders upside-down and mirrored
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    CGContextConcatCTM(context, flipVertical);

    [self.layer renderInContext:context];

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage* background = [UIImage imageWithCGImage: cgImage];
    CGImageRelease(cgImage);

    self.currentScreen = background;

    //debugging
    if (frameCount < 40) {
          NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
          NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
          [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
          frameCount++;
    }

    //NOTE:  to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
    //       'setNeedsDisplay' on the ScreenCaptureView.
    if (_recording) {
        float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
        [self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
    }

    float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
    float delayRemaining = (1.0 / self.frameRate) - processingSeconds;

    CGContextRelease(context);

    //redraw at the specified framerate
    [self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}

- (void) cleanupWriter {
    avAdaptor = nil;

    videoWriterInput = nil;

    videoWriter = nil;

    startedAt = nil;

    if (bitmapData != NULL) {
        free(bitmapData);
        bitmapData = NULL;
    }
}

- (void)dealloc {
    [self cleanupWriter];
}

- (NSURL*) tempFileURL {
    NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
    NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager* fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSError* error;
        if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
            NSLog(@"Could not delete old recording file at path:  %@", outputPath);
        }
    }

    return outputURL;
}

-(BOOL) setUpWriter {
    NSError* error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);

    //Configure video
    NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
                                           nil ];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    return YES;
}

- (void) completeRecordingSession {

    [videoWriterInput markAsFinished];

    // Wait for the video
    int status = videoWriter.status;
    while (status == AVAssetWriterStatusUnknown) {
        NSLog(@"Waiting...");
        [NSThread sleepForTimeInterval:0.5f];
        status = videoWriter.status;
    }

    @synchronized(self) {




       [videoWriter finishWritingWithCompletionHandler:^{

           [self cleanupWriter];
           BOOL success = YES;
           id delegateObj = self.delegate;
           NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
           NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

           NSLog(@"Completed recording, file is stored at:  %@", outputURL);
           if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
               [delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
           }


       }];


    }

}

- (bool) startRecording {
    bool result = NO;
    @synchronized(self) {
        if (! _recording) {
            result = [self setUpWriter];
            startedAt = [NSDate date];
            _recording = true;
        }
    }

    return result;
}

- (void) stopRecording {
    @synchronized(self) {
        if (_recording) {
            _recording = false;
            [self completeRecordingSession];
        }
    }
}

-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData]) {
        NSLog(@"Not ready for video data");
    }
    else {
        @synchronized (self) {
            UIImage *newFrame = self.currentScreen;
            CVPixelBufferRef pixelBuffer = NULL;
            CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
            CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

            int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
            if(status != 0){
                //could not get a buffer from the pool
                NSLog(@"Error creating pixel buffer:  status=%d", status);
            }
            // set image data into pixel buffer
            CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
            uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
            CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  //XXX:  will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data

            if(status == 0){
                BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
                if (!success)
                    NSLog(@"Warning:  Unable to write buffer to video");
            }

            //clean up
            CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
            CVPixelBufferRelease( pixelBuffer );
            CFRelease(image);
            CGImageRelease(cgImage);
        }

    }

}

正如您在drawRect 方法中看到的那样,我保存了所有图像,它们看起来很棒,但是当我尝试制作视频时,它只会创建一个看起来像这样的静止图像,当图像看起来像这样。

这是输出,它是一个视频,但仅此而已。当图片看起来正常时(不倾斜,很奇怪)

我的问题是制作视频时出了什么问题?

感谢您的帮助和您的时间,我知道这是一个很长的问题。

【问题讨论】:

标签: ios objective-c video avfoundation


【解决方案1】:

当我想从 CGImageRef(来自 UIImage)创建 CVPixelBufferRef(来自 UIImage)时,由于某些分辨率导致完全相同的视频效果,我发现了这篇文章。

就我而言,非常简短的回答是,我已将每行的字节数硬连接为宽度的 4 倍。以前一直在工作!现在我查询 CVPixelBuffer 本身来获取这个值和 poof,问题解决了!

产生问题的代码是这样的:

CGContextRef context = CGBitmapContextCreate(pxdata, w, h, 8, 4*w, rgbColorSpace, bitMapInfo);

解决问题的代码如下:

    CGContextRef context = CGBitmapContextCreate(
                                             pxdata, w, h,
                                             8, CVPixelBufferGetBytesPerRow(pxbuffer),
                                             rgbColorSpace,bitMapInfo);

在这两种情况下,都设置了 bitMapInfo:

GBitmapInfo bitMapInfo =kCGImageAlphaPremultipliedFirst; // According to Apple's doc, this is safe:  June 26, 2014

【讨论】:

    【解决方案2】:

    像素缓冲区适配器仅适用于某些像素大小的图像。您可能需要更改图像的大小。您可以想象,您的视频中发生的事情是作者试图将您的 361x241 图像写入 360x240 大小的空间。每行都从最后一行的最后一个像素开始,因此它最终会像您看到的那样对角倾斜。检查苹果文档以获取支持的尺寸。我相信我使用了 480x320 并且它是受支持的。您可以使用此方法调整图像大小:

    +(UIImage *)scaleImage:(UIImage*)image toSize:(CGSize)newSize {
    
        CGRect scaledImageRect = CGRectZero;
    
        CGFloat aspectWidth = newSize.width / image.size.width;
        CGFloat aspectHeight = newSize.height / image.size.height;
        CGFloat aspectRatio = 3.0 / 2;
    
        scaledImageRect.size.width = image.size.width * aspectRatio;
        scaledImageRect.size.height = image.size.height * aspectRatio;
        scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0f;
        scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0f;
    
        UIGraphicsBeginImageContextWithOptions(CGSizeMake(480, 320), NO, 0 );
        [image drawInRect:scaledImageRect];
        UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    
        return scaledImage;
    }
    

    【讨论】:

    • 非常感谢您提供的方法,当我登录pixelBuffer 时,我得到了&lt;CVPixelBuffer 0x174134be0 width=375 height=667 bytesPerRow=1536 pixelFormat=32,那么我想在您的方法中将 CGSize 更改为什么?
    • //改成这个:UIImage *newFrame = self.currentScreen; //改成这个:UIImage *newFrame = [self scaleImage:self.currentScreen toSize:CGSizeMake(480,320)];
    • 是的,这就是我所做的,你可以自己尝试,它会抛出一个很难解释的错误,这里是项目github.com/spennyf/cropVid,只需取消注释 ViewController.m 中的第 81 行和第 82 行即可让它尝试记录,谢谢
    • 嗨,Alex,你能看看我的意思吗?
    • 很遗憾我不能,因为接下来的几天我很忙。我也没有图像,所以我很难测试。你能至少告诉我错误是什么吗?
    【解决方案3】:

    我认为这是因为每行的 pixelBuffer 字节与每行的 UIImage 字节不匹配。就我而言(iPhone 6 iOS8.3),UIImage 为 568 x 320,CFDataGetLength 为 727040,因此每行的字节数为 2272。但每行的 pixelBuffer 字节数为 2304。我认为这额外的 32 个字节来自填充,因此字节pixelBuffer 中的每行可被 64 整除。我不确定如何在所有设备上强制 pixelBuffer 匹配输入数据,反之亦然。

    【讨论】:

    • 将 bitmapBytesPerRow 设置为 (CGContextRef) createBitmapContextOfSize:(CGSize) 中每行的 pixelBuffer 字节数适用于所有设备。
    【解决方案4】:

    在这种情况下,我遭受了很多痛苦。我尝试了很多方法来从 Image 数组创建视频,但结果几乎和你的一样。

    问题出在 CVPixel 缓冲区中。我用来从图像创建的缓冲区不正确。

    但最后我得到了它的工作。

    从数组的 url 创建视频的主函数

    您只需输入图像数组和 fps,大小可以等于图像的大小(如果需要)。 fps = 数组中的图像数量/所需的持续时间

    例如:fps = 90 / 3 = 30

    - (void)getVideoFrom:(NSArray *)array
                  toPath:(NSString*)path
                    size:(CGSize)size
                     fps:(int)fps
       withCallbackBlock:(void (^) (BOOL))callbackBlock
    {
        NSLog(@"%@", path);
        NSError *error = nil;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                               fileType:AVFileTypeMPEG4
                                                                  error:&error];
        if (error) {
            if (callbackBlock) {
                callbackBlock(NO);
            }
            return;
        }
        NSParameterAssert(videoWriter);
    
        NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
                                        AVVideoWidthKey: [NSNumber numberWithInt:size.width],
                                        AVVideoHeightKey: [NSNumber numberWithInt:size.height]};
    
        AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                             outputSettings:videoSettings];
    
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                         sourcePixelBufferAttributes:nil];
        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        [videoWriter addInput:writerInput];
    
        //Start a session:
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
        CVPixelBufferRef buffer;
        CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    
        CMTime presentTime = CMTimeMake(0, fps);
    
        int i = 0;
        while (1)
        {
    
            if(writerInput.readyForMoreMediaData){
    
                presentTime = CMTimeMake(i, fps);
    
                if (i >= [array count]) {
                    buffer = NULL;
                } else {
                    buffer = [self pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)];
                }
    
                if (buffer) {
                    //append buffer
    
                    BOOL appendSuccess = [self appendToAdapter:adaptor
                                                   pixelBuffer:buffer
                                                        atTime:presentTime
                                                     withInput:writerInput];
                    NSAssert(appendSuccess, @"Failed to append");
    
    
                    i++;
                } else {
    
                    //Finish the session:
                    [writerInput markAsFinished];
    
                    [videoWriter finishWritingWithCompletionHandler:^{
                        NSLog(@"Successfully closed video writer");
                        if (videoWriter.status == AVAssetWriterStatusCompleted) {
                            if (callbackBlock) {
                                callbackBlock(YES);
                            }
                        } else {
                            if (callbackBlock) {
                                callbackBlock(NO);
                            }
                        }
                    }];
    
                    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    
                    NSLog (@"Done");
                    break;
                }
            }
        }
    }
    

    从CGImage获取CVPixelBuffer的函数

    -(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image size:(CGSize)imageSize
    {
    
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
    
        CVPixelBufferRef pxbuffer = NULL;
        CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                            CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                            &pxbuffer);
        CVPixelBufferLockBaseAddress(pxbuffer, 0);
    
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    
        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                     CGImageGetHeight(image), 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
                                                     (int)kCGImageAlphaNoneSkipFirst);
    
    
        CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    
        CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    
        return pxbuffer;
    }
    

    附加到适配器的函数

    -(BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor*)adaptor
               pixelBuffer:(CVPixelBufferRef)buffer
                    atTime:(CMTime)presentTime
                 withInput:(AVAssetWriterInput*)writerInput
    {
        while (!writerInput.readyForMoreMediaData) {
            usleep(1);
        }
    
        return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
    }
    

    【讨论】:

      猜你喜欢
      • 1970-01-01
      • 2018-10-28
      • 2016-10-18
      • 1970-01-01
      • 2016-04-03
      • 2018-08-13
      • 1970-01-01
      • 2013-01-03
      • 2012-12-23
      相关资源
      最近更新 更多