【问题标题】:Extract Frames from Video C#从视频 C# 中提取帧
【发布时间】:2016-02-13 14:04:14
【问题描述】:

我正在尝试制作一个使用相机录制视频并处理视频图像的应用。这就是我想要的。首先,我的应用使用 Torch 录制了一个 10 秒的视频。其次,我使用一种方法来播放视频以查看我录制的内容。

我被三件事困住了。

  1. 如何将我的视频转换为单独的帧(图像)?
  2. 是否可以在录制视频时异步转换视频?
  3. 当我将视频转换为单独的帧时,如何处理它们?它们是JPEG吗?我可以简单地将它们显示为图像吗?等等。

主要代码:

using System;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;

using Windows.UI.Xaml.Navigation;

namespace App3
{

public sealed partial class MainPage : Page
{
    // One-second countdown timer that ends the recording automatically.
    DispatcherTimer D;
    // Seconds remaining on the countdown (double, but only whole values are used).
    double basetimer = 0;

    public MainPage()
    {
        this.InitializeComponent();
        this.NavigationCacheMode = NavigationCacheMode.Required;
        D = new DispatcherTimer();
        D.Interval = new TimeSpan(0, 0, 1); // tick once per second
        D.Tick += timer_Tick;
        txt.Text = basetimer.ToString();
        Play.IsEnabled = false; // nothing to play until something is recorded
    }

    // Camera/recording helper shared by all the handlers below.
    public Library Library = new Library();
    public object PreviewImage { get; private set; }

    // Runs once per second while recording; when the countdown reaches zero it
    // stops the timer, the preview and the recording, and toggles the buttons.
    void timer_Tick(object sender, object e)
    {
        basetimer = basetimer - 1;
        txt.Text = basetimer.ToString();
        // basetimer is a double, so compare with <= rather than == : an exact
        // equality test would never fire again if the value skipped past zero.
        if (basetimer <= 0)
        {
            D.Stop();
            Preview.Source = null;
            Library.Stop();
            Record.IsEnabled = false; // NOTE(review): stays disabled until Clear is pressed
            Play.IsEnabled = true;
            Clear.IsEnabled = true;
            if (Library._tc.Enabled)
            {
                Library._tc.Enabled = false; // turn the torch off
            }
        }
    }

    // Toggles recording: stops if a recording is in progress, otherwise starts
    // the countdown and begins capturing (torch is enabled inside Record()).
    private void Record_Click(object sender, RoutedEventArgs e)
    {
        if (Library.Recording)
        {
            Preview.Source = null;
            Library.Stop();
            Record.Icon = new SymbolIcon(Symbol.Video);
        }
        else
        {
            basetimer = 11; // 10 s of video plus the first immediate tick
            D.Start();
            Display.Source = null;
            Library.Record(Preview);
            Record.Icon = new SymbolIcon(Symbol.VideoChat);
            Record.IsEnabled = false;
            Play.IsEnabled = false;
        }
    }

    // Plays back the recorded clip in the Display element.
    private async void Play_Click(object sender, RoutedEventArgs e)
    {
        await Library.Play(Dispatcher, Display);
        // TODO: extract frames from the recorded buffer here.
    }

    // Resets the UI and releases the capture device so a new recording can start.
    private void Clear_Click(object sender, RoutedEventArgs e)
    {
        Display.Source = null;
        Record.Icon = new SymbolIcon(Symbol.Video);
        txt.Text = "0";
        basetimer = 0;
        Play.IsEnabled = false;
        Record.IsEnabled = true;
        if (Library.capture != null)
        {
            D.Stop();
            Library.Recording = false;
            Preview.Source = null;
            Library.capture.Dispose();
            Library.capture = null;
            basetimer = 11; // NOTE(review): pre-arms the countdown for the next recording — confirm intent
        }
    }
}
}

库类:

using System;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Media.Capture;
using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Storage.Streams;
using Windows.UI.Core;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media.Imaging;
using Windows.Graphics.Imaging;
using Emgu.CV.Structure;
using Emgu.CV;
using System.Collections.Generic;

// Wraps MediaCapture: records an MP4 clip (with the torch on) into an
// in-memory stream and plays it back through a MediaElement.
public class Library
{

// Base name used when the in-memory recording is persisted for playback.
private const string videoFilename = "video.mp4";
// Actual (possibly uniquified) name of the last file written by Play().
private string filename;
// Active capture device; null until init() runs; the page's Clear handler disposes and nulls it.
public MediaCapture capture;
// Holds the recorded MP4 bytes between Record() and Play().
public InMemoryRandomAccessStream buffer;
// True while a recording is in progress (shared across the app).
public static bool Recording;
// Torch (flash) control of the selected camera; toggled together with recording.
public TorchControl _tc;
// NOTE(review): never read or written in the visible code — looks unused.
public int basetimer  ;
// Creates a fresh in-memory buffer and (re)initializes the MediaCapture
// device using the back-panel camera. Returns true on success; rethrows
// UnauthorizedAccessException to the caller when camera access is denied.
public async Task<bool> init()
{
    if (buffer != null)
    {
        buffer.Dispose();
    }
    buffer = new InMemoryRandomAccessStream();
    if (capture != null)
    {
        capture.Dispose();
        // Must be nulled out: otherwise the "capture == null" check below
        // is false on the second call and the disposed object gets reused.
        capture = null;
    }
    try
    {
        if (capture == null)
        {
            var allVideoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            // Prefer the back-panel camera; FirstOrDefault yields null if none exists.
            DeviceInformation cameraDevice =
            allVideoDevices.FirstOrDefault(x => x.EnclosureLocation != null &&
            x.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back);
            capture = new MediaCapture();
            var mediaInitSettings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };
            // Initialize the device and grab its torch control.
            try
            {
                await capture.InitializeAsync(mediaInitSettings);
                var videoDev = capture.VideoDeviceController;
                _tc = videoDev.TorchControl;
                Recording = false;
                _tc.Enabled = false;
            }
            catch (UnauthorizedAccessException)
            {
                Debug.WriteLine("UnauthorizedAccessExeption>>");
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
            }
            // Subscribe once per capture instance (the original subscribed on
            // every init() call, stacking duplicate handlers).
            capture.Failed += (MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) =>
            {
                Recording = false;
                _tc.Enabled = false;
                // NOTE(review): an exception thrown from an event handler cannot
                // be caught by callers of init(); consider logging instead.
                throw new Exception(string.Format("Code: {0}. {1}", errorEventArgs.Code, errorEventArgs.Message));
            };
        }
    }
    catch (Exception ex)
    {
        if (ex.InnerException != null && ex.InnerException.GetType() == typeof(UnauthorizedAccessException))
        {
            throw ex.InnerException; // NOTE(review): rethrowing this way resets the stack trace
        }
        throw;
    }
    return true;
}
// Initializes the camera, starts the on-screen preview, and begins recording
// MP4 into the in-memory buffer with the torch turned on.
// Returns Task (was async void) so callers can await or observe failures;
// existing fire-and-forget callers still compile unchanged.
public async Task Record(CaptureElement preview)
{
    // Guard BEFORE touching the device: the original threw only after the
    // second recording had already been started.
    if (Recording) throw new InvalidOperationException("cannot execute two records at the same time");
    await init();
    preview.Source = capture;
    await capture.StartPreviewAsync();
    await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto), buffer);
    Recording = true;
    _tc.Enabled = true; // torch on for the duration of the recording
}
// Stops the active recording and turns the torch off.
// Returns Task (was async void) so callers can observe completion/errors.
public async Task Stop()
{
    // Nothing to stop if the device was never initialized or was already
    // cleared (Clear_Click disposes and nulls `capture`).
    if (capture == null)
    {
        Recording = false;
        return;
    }
    await capture.StopRecordAsync();
    Recording = false;
    _tc.Enabled = false;
}

// Persists the in-memory recording to a temporary file and plays it in the
// given MediaElement. The copy and playback run on the dispatcher thread.
public async Task Play(CoreDispatcher dispatcher, MediaElement playback)
{
    // Validate before dereferencing: the original cloned first and then
    // null-checked the clone, which could never catch a null buffer.
    if (buffer == null) throw new ArgumentNullException(nameof(buffer));
    IRandomAccessStream video = buffer.CloneStream();

    // The package install folder is read-only for UWP apps, so use the app's
    // local data folder for the temporary playback file.
    StorageFolder storageFolder = ApplicationData.Current.LocalFolder;
    if (!string.IsNullOrEmpty(filename))
    {
        // Remove the file left over from the previous playback.
        StorageFile original = await storageFolder.GetFileAsync(filename);
        await original.DeleteAsync();
    }
    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        StorageFile storageFile = await storageFolder.CreateFileAsync(videoFilename, CreationCollisionOption.GenerateUniqueName);
        filename = storageFile.Name;
        using (IRandomAccessStream fileStream = await storageFile.OpenAsync(FileAccessMode.ReadWrite))
        {
            // Copy the recorded bytes into the file, then release the clone.
            await RandomAccessStream.CopyAndCloseAsync(video.GetInputStreamAt(0), fileStream.GetOutputStreamAt(0));
            await video.FlushAsync();
            video.Dispose();
        }
        IRandomAccessStream stream = await storageFile.OpenAsync(FileAccessMode.Read);

        playback.SetSource(stream, storageFile.FileType);
        playback.Play();
    });
}

【问题讨论】:

    标签: c# video image-processing win-universal-app


    【解决方案1】:

    在 Accord 遇到很多麻烦后,我最终使用 MediaToolkit 解决了类似的问题。

    我需要为视频的每一秒保存一张图片:

    using (var engine = new Engine())
    {
        var mp4 = new MediaFile { Filename = mp4FilePath };

        engine.GetMetadata(mp4);

        // TotalSeconds, not Seconds: Duration.Seconds is only the seconds
        // component (0-59), so any video a minute or longer would be truncated.
        // (Confirmed by commenters below.)
        var i = 0;
        while (i < mp4.Metadata.Duration.TotalSeconds)
        {
            // Grab one thumbnail per second of video.
            var options = new ConversionOptions { Seek = TimeSpan.FromSeconds(i) };
            var outputFile = new MediaFile { Filename = string.Format("{0}\\image-{1}.jpeg", outputPath, i) };
            engine.GetThumbnail(mp4, outputFile, options);
            i++;
        }
    }
    

    希望有一天这对某人有所帮助。

    .NET 5 更新:

    最近,我需要更新此代码以在 .NET 5 中工作。为此,我正在使用 MediaToolkit.NetCore,它已经预览了一年多。另请注意:您需要为您的应用提供最新的 ffmpeg,包括所有 3 个 .exe 文件(ffmpeg、ffplay、ffprobe)。

    废话不多说,更新代码如下:

    // _env is the injected IWebHostEnvironment
    // _tempPath is temporary file storage
    var ffmpegPath = Path.Combine(_env.ContentRootPath, "<path-to-ffmpeg.exe>");

    var mediaToolkitService = MediaToolkitService.CreateInstance(ffmpegPath);
    var metadataTask = new FfTaskGetMetadata(_tempFile);
    var metadata = await mediaToolkitService.ExecuteAsync(metadataTask);

    // One thumbnail per iteration, written as image-0000.jpeg, image-0001.jpeg, ...
    // NOTE(review): DurationTs appears to be the stream duration in time-base
    // units, not seconds — confirm it matches the intended thumbnail count.
    var i = 0;
    while (i < metadata.Metadata.Streams.First().DurationTs)
    {
        var outputFile = string.Format("{0}\\image-{1:0000}.jpeg", _imageDir, i);
        var thumbTask = new FfTaskSaveThumbnail(_tempFile, outputFile, TimeSpan.FromSeconds(i));
        _ = await mediaToolkitService.ExecuteAsync(thumbTask);
        i++;
    }
    

    【讨论】:

    • 绝对帮助了我！在发表此评论时，MediaToolkit nuget 包不支持 .NET Core，在 .NET Framework 4.8 上运行良好。 @Brendan，您的 while 循环条件中有一个错字，我相信您的预期含义是 mp4.Metadata.Duration.TotalSeconds
    • 这使得从视频中提取 jpg's 非常简单和容易。谢谢。
    • 谢谢,mp4.Metadata.Duration.TotalSeconds 为我工作。
    【解决方案2】:

    我昨天才发现这个。

    这是一个完整且易于理解的示例,其中包含选择视频文件并在视频的第一秒保存快照。

    您可以选择适合您项目的部分并更改其中一些(即从相机获取视频分辨率)

    1) 和 3)

    // Timestamp of the frame to capture (here: 1 second into the video).
    TimeSpan timeOfFrame = new TimeSpan(0, 0, 1);

            // Let the user pick an .mp4 file from the Videos library.
            var picker = new Windows.Storage.Pickers.FileOpenPicker();
            picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
            picker.FileTypeFilter.Add(".mp4");
            StorageFile pickedFile = await picker.PickSingleFileAsync();
            if (pickedFile == null)
            {
                return; // user cancelled the picker
            }
            ///


            // Read the video resolution from the file's encoding properties.
            List<string> encodingPropertiesToRetrieve = new List<string>();
            encodingPropertiesToRetrieve.Add("System.Video.FrameHeight");
            encodingPropertiesToRetrieve.Add("System.Video.FrameWidth");
            IDictionary<string, object> encodingProperties = await pickedFile.Properties.RetrievePropertiesAsync(encodingPropertiesToRetrieve);
            uint frameHeight = (uint)encodingProperties["System.Video.FrameHeight"];
            uint frameWidth = (uint)encodingProperties["System.Video.FrameWidth"];
            ///


            // Use Windows.Media.Editing to render the frame as an image stream.
            var clip = await MediaClip.CreateFromFileAsync(pickedFile);
            var composition = new MediaComposition();
            composition.Clips.Add(clip);

            // NearestFrame snaps the requested time to the closest real frame.
            var imageStream = await composition.GetThumbnailAsync(timeOfFrame, (int)frameWidth, (int)frameHeight, VideoFramePrecision.NearestFrame);
            ///


            // Decode the thumbnail stream into a bitmap we can read pixels from.
            var writableBitmap = new WriteableBitmap((int)frameWidth, (int)frameHeight);
            writableBitmap.SetSource(imageStream);


            // Generate some random name for the output file in PicturesLibrary.
            var saveAsTarget = await KnownFolders.PicturesLibrary.CreateFileAsync("IMG" + Guid.NewGuid().ToString().Substring(0, 4) + ".jpg");


            // Copy the raw BGRA pixel data out of the bitmap.
            Stream stream = writableBitmap.PixelBuffer.AsStream();
            byte[] pixels = new byte[(uint)stream.Length];
            await stream.ReadAsync(pixels, 0, pixels.Length);

            // Encode the pixels as JPEG and write them to the target file.
            using (var writeStream = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
            {
                var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, writeStream);
                encoder.SetPixelData(
                    BitmapPixelFormat.Bgra8,
                    BitmapAlphaMode.Premultiplied,
                    (uint)writableBitmap.PixelWidth,
                    (uint)writableBitmap.PixelHeight,
                    96, // horizontal DPI
                    96, // vertical DPI
                    pixels);
                await encoder.FlushAsync();

                using (var outputStream = writeStream.GetOutputStreamAt(0))
                {
                    await outputStream.FlushAsync();
                }
            }
    

    如果你想在xaml Image中显示帧,你应该使用imageStream

    // Show the extracted frame in a XAML Image control.
    var frameBitmap = new BitmapImage();
    frameBitmap.SetSource(imageStream);

    XAMLImage.Source = frameBitmap;
    

    如果要提取更多帧,还有composition.GetThumbnailsAsync

    2) 在计时器计时结束时使用您的 mediaCapture

    【讨论】:

    • 我认为这将无法正常工作。我的视频不是从路径中选择的，而是录制到流上，即 InMemoryRandomAccessStream buffer。有没有办法将 InMemoryRandomAccessStream buffer 转换为 StorageFile，然后我也许就可以用你的方式？感谢您的帮助。
    • 在 Record 方法中有 MediaCapture 对象,在 Play 方法中显然有 Storage 文件。什么不工作?
    • 如果您想在标准的 .net 桌面应用程序中使用它,请按照以下步骤操作 github.com/jbe2277/waf/wiki/…
    • 要获取视频元数据,可以在上面的代码中使用它来代替 var props = awaitpickFile.Properties.GetVideoPropertiesAsync(); uint frameHeight = props.Height; uint frameWidth = props.Width; var videolength = props.Duration;
    • 我无法引用在 Windows 文件夹中找到的 Windows.Media.Editing.dll。VS 说它不能包含它，因为它不是有效的程序集或 COM 组件。我如何准确地使用这里用到的类？
    【解决方案3】:

    使用ffmpeg并安装Accord.Video.FFMPEG

    using (var vFReader = new VideoFileReader())
    {
        vFReader.Open("video.mp4");
        // FrameCount is unreliable for some containers (commenters report it
        // returning 0), so read until the decoder returns null instead of
        // trusting the reported count.
        Bitmap bmpBaseOriginal;
        while ((bmpBaseOriginal = vFReader.ReadVideoFrame()) != null)
        {
            // Each frame Bitmap is IDisposable — dispose it after processing
            // to avoid leaking GDI handles (the original never disposed them).
            using (bmpBaseOriginal)
            {
                // process the frame here
            }
        }
        // No explicit Close() needed: leaving the using block disposes
        // (and thereby closes) the reader.
    }
    

    【讨论】:

    • 你在 c# 中为 ffmpeg 使用了哪个包装器?
    • @MajidHojati 这看起来像 Accord.net,它现在已经吸收了 Aforge。需要注意的一件事是它是在 GPL 下发布的。
    • 你为什么在using里面使用Close
    • 它可以工作,但FrameCount 始终为0,并且视频在结束后循环。如何检查视频结束?
    • @Qwertiy,我写了一个循环调用 ReadVideoFrame() 直到位图为空。
    【解决方案4】:

    另一种获取方式:

    我使用了FFMpegCore,带有.net core 3.1 + ubuntu (list of available images) 的官方 docker 镜像

    Dockerfile:

    # Base image: .NET Core 3.1 runtime on Ubuntu 18.04 (bionic).
    FROM mcr.microsoft.com/dotnet/runtime:3.1-bionic
    # ffmpeg for frame extraction; libgdiplus so System.Drawing works on Linux.
    RUN apt-get update && apt-get install -y ffmpeg libgdiplus

    # Copy the published app into the image and run it.
    COPY bin/Release/netcoreapp3.1/publish/ App/
    WORKDIR /App
    ENTRYPOINT ["dotnet", "YouConsoleAppNameHere.dll"]
    

    短代码版本:

    // Point FFMpegCore at the ffmpeg binaries installed in the container.
    GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location

    // Extract every frame of sample.mp4 as PNGs named Frame00001.png, Frame00002.png, ...
    string filePath = AppContext.BaseDirectory + "sample.mp4";    
    FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();    
    

    扩展版(带有一些控制台日志):

    using FFMpegCore;
    using FFMpegCore.Enums;
    ...
    // Point FFMpegCore at the ffmpeg binaries and a writable temp folder.
    GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location

    string filePath = AppContext.BaseDirectory + "sample.mp4";
    Console.WriteLine(filePath) ;
    Console.WriteLine(File.Exists(filePath));


    // Probe the file first so its duration can be logged.
    var mediaInfo = FFProbe.Analyse(filePath);
    Console.WriteLine("mp4 duration : " + mediaInfo.Duration);

    // Output folder for the extracted frames.
    Directory.CreateDirectory("tmp");
    Directory.CreateDirectory("tmp/Video");
    Console.WriteLine("started " + DateTime.Now.ToLongTimeString());

    // Decode every frame into numbered PNGs (Frame00001.png, Frame00002.png, ...).
    FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();
    Console.WriteLine("processed " + DateTime.Now.ToLongTimeString());

    Console.WriteLine(string.Join(", ", Directory.EnumerateFiles("tmp/Video/")));
    

    因此 - png 文件将被提取到 tmp/Video 文件夹。当然,如果需要,你也可以不使用 docker 来做同样的事情。

    【讨论】:

      猜你喜欢
      • 1970-01-01
      • 1970-01-01
      • 2014-02-27
      • 2023-03-30
      • 1970-01-01
      • 1970-01-01
      • 2018-05-27
      • 2017-11-01
      • 2012-02-22
      相关资源
      最近更新 更多