
Extract Frames from Video C#

I am trying to make an application that records a video with the camera and processes the video's images. This is what I want: first, my app records a 10-second video with the torch (flashlight) enabled; second, I use a method to play the video back and see what I recorded.

I am stuck on three things.

  1. How do I convert my video into individual frames (images)?
  2. Is it possible to convert the video asynchronously, while it is still being recorded?
  3. Once I have converted the video into individual frames, what do I do with them? Are they JPEGs? Can I simply display them as images? And so on.

Main code:

using System;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;

using Windows.UI.Xaml.Navigation;

namespace App3
{

public sealed partial class MainPage : Page
{          
    DispatcherTimer D;
    double basetimer = 0;
    public MainPage()
    {
        this.InitializeComponent();       
        this.NavigationCacheMode = NavigationCacheMode.Required;
        D = new DispatcherTimer();      
        D.Interval = new TimeSpan(0, 0, 1);
        D.Tick += timer_Tick;
        txt.Text = basetimer.ToString();
        Play.IsEnabled = false;            
    }  
    public Library Library = new Library();
    public object PreviewImage { get; private set; }
    void timer_Tick(object sender, object e)
    {
        basetimer = basetimer - 1;
        txt.Text = basetimer.ToString();
        if (basetimer == 0)
        {
            D.Stop();               
            Preview.Source = null;
            Library.Stop();
            Record.IsEnabled = false;
            Play.IsEnabled = true;
            Clear.IsEnabled = true;
            if (Library._tc.Enabled)
            {
                Library._tc.Enabled = false;
            }                
        }
    }
    private void Record_Click(object sender, RoutedEventArgs e)
    {            
        if (Library.Recording)
        {
            Preview.Source = null;
            Library.Stop();
            Record.Icon = new SymbolIcon(Symbol.Video);                
        }
        else
        {
            basetimer = 11;
            D.Start();
            //D.Tick += timer_Tick;
            Display.Source = null;
            Library.Record(Preview);
            Record.Icon = new SymbolIcon(Symbol.VideoChat);
            Record.IsEnabled = false;
            Play.IsEnabled = false;
        }
    }
    private async void Play_Click(object sender, RoutedEventArgs e)
    {            
        await Library.Play(Dispatcher, Display);
        //Extract_Image_From_Video(Library.buffer);            
    }
    private void Clear_Click(object sender, RoutedEventArgs e)
    {
        Display.Source = null;
        Record.Icon = new SymbolIcon(Symbol.Video);
        txt.Text = "0";
        basetimer = 0;
        Play.IsEnabled = false;
        Record.IsEnabled = true;
        if (Library.capture != null)
        {
            D.Stop();
            Library.Recording = false;
            Preview.Source = null;
            Library.capture.Dispose();
            Library.capture = null;
            basetimer = 11;
        }
    }
}
}

Library class:

using System;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Media.Capture;
using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Storage.Streams;
using Windows.UI.Core;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media.Imaging;
using Windows.Graphics.Imaging;
using Emgu.CV.Structure;
using Emgu.CV;
using System.Collections.Generic;

public class Library
{

private const string videoFilename = "video.mp4";
private string filename;
public MediaCapture capture;
public InMemoryRandomAccessStream buffer;
public static bool Recording;
public TorchControl _tc;
public int basetimer  ;   
public async Task<bool> init()
{
    if (buffer != null)
    {
        buffer.Dispose();
    }
    buffer = new InMemoryRandomAccessStream();
    if (capture != null)
    {
        capture.Dispose();
    }
    try
    {

        if (capture == null)
        {
            var allVideoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);               
            DeviceInformation cameraDevice =
            allVideoDevices.FirstOrDefault(x => x.EnclosureLocation != null &&
            x.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back);
            capture = new MediaCapture();
            var mediaInitSettings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };
            // Initialize 
            try
            {
                await capture.InitializeAsync(mediaInitSettings);
                var videoDev = capture.VideoDeviceController;
                _tc = videoDev.TorchControl;
                Recording = false;
                _tc.Enabled = false;                                      
            }
            catch (UnauthorizedAccessException)
            {
                Debug.WriteLine("UnauthorizedAccessExeption>>");
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
            }
        }
            capture.Failed += (MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) =>
        {
            Recording = false;
            _tc.Enabled = false;
            throw new Exception(string.Format("Code: {0}. {1}", errorEventArgs.Code, errorEventArgs.Message));
        };
    }
    catch (Exception ex)
    {
        if (ex.InnerException != null && ex.InnerException.GetType() == typeof(UnauthorizedAccessException))
        {
            throw ex.InnerException;
        }
        throw;
    }
    return true;
}
public async void Record(CaptureElement preview)
{    
    await init();
    preview.Source = capture; 
    await capture.StartPreviewAsync();
    await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto), buffer);
    if (Recording) throw new InvalidOperationException("cannot excute two records at the same time");
    Recording = true;
    _tc.Enabled = true;

}
public async void Stop()
{
    await capture.StopRecordAsync();
    Recording = false;
    _tc.Enabled = false;       
}    

public async Task Play(CoreDispatcher dispatcher, MediaElement playback)
{
    IRandomAccessStream video = buffer.CloneStream();

    if (video == null) throw new ArgumentNullException("buffer");
    StorageFolder storageFolder = Windows.ApplicationModel.Package.Current.InstalledLocation;
    if (!string.IsNullOrEmpty(filename))
    {
        StorageFile original = await storageFolder.GetFileAsync(filename);
        await original.DeleteAsync();
    }
    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        StorageFile storageFile = await storageFolder.CreateFileAsync(videoFilename, CreationCollisionOption.GenerateUniqueName);
        filename = storageFile.Name;
        using (IRandomAccessStream fileStream = await storageFile.OpenAsync(FileAccessMode.ReadWrite))
        {
            await RandomAccessStream.CopyAndCloseAsync(video.GetInputStreamAt(0), fileStream.GetOutputStreamAt(0));
            await video.FlushAsync();
            video.Dispose();
        }
        IRandomAccessStream stream = await storageFile.OpenAsync(FileAccessMode.Read);

        playback.SetSource(stream, storageFile.FileType);
        playback.Play();





    });


}
}

After running into a lot of trouble with Accord, I ended up using MediaToolkit to solve a similar problem.

I needed to save one image for every second of the video:

using (var engine = new Engine())
{
    var mp4 = new MediaFile { Filename = mp4FilePath };

    engine.GetMetadata(mp4);

    var i = 0;
    // use TotalSeconds here; Duration.Seconds is only the 0-59 seconds component and breaks for clips longer than a minute
    while (i < mp4.Metadata.Duration.TotalSeconds)
    {
        var options = new ConversionOptions { Seek = TimeSpan.FromSeconds(i) };
        var outputFile = new MediaFile { Filename = string.Format("{0}\\image-{1}.jpeg", outputPath, i) };
        engine.GetThumbnail(mp4, outputFile, options);
        i++;
    }
}

Hopefully this helps someone someday.

.NET 5 update:

Recently I needed to update this code to work with .NET 5. For that I used MediaToolkit.NetCore, which has been in preview for over a year. Also note: you need to ship an up-to-date ffmpeg with your application, including all three executables (ffmpeg, ffplay, ffprobe).

Without further ado, here is the updated code:

// _env is the injected IWebHostEnvironment
// _tempPath is temporary file storage
var ffmpegPath = Path.Combine(_env.ContentRootPath, "<path-to-ffmpeg.exe>");

var mediaToolkitService = MediaToolkitService.CreateInstance(ffmpegPath);
var metadataTask = new FfTaskGetMetadata(_tempFile);
var metadata = await mediaToolkitService.ExecuteAsync(metadataTask);

var i = 0;
while (i < metadata.Metadata.Streams.First().DurationTs)
{
    var outputFile = string.Format("{0}\\image-{1:0000}.jpeg", _imageDir, i);
    var thumbTask = new FfTaskSaveThumbnail(_tempFile, outputFile, TimeSpan.FromSeconds(i));
    _ = await mediaToolkitService.ExecuteAsync(thumbTask);
    i++;
}

I only figured this out yesterday.

Here is a complete and easy-to-understand example that picks a video file and saves a snapshot from the first second of the video.

You can take the parts that fit your project and change some of them (e.g. getting the video resolution from the camera instead; see the sketch after the code below).

1) and 3)

        // needs: Windows.Media.Editing, Windows.Storage, Windows.Storage.Streams, Windows.Graphics.Imaging,
        // Windows.UI.Xaml.Media.Imaging, System.IO and System.Runtime.InteropServices.WindowsRuntime (for PixelBuffer.AsStream)
        TimeSpan timeOfFrame = new TimeSpan(0, 0, 1);

        //pick mp4 file
        var picker = new Windows.Storage.Pickers.FileOpenPicker();
        picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
        picker.FileTypeFilter.Add(".mp4");
        StorageFile pickedFile = await picker.PickSingleFileAsync();
        if (pickedFile == null)
        {
            return;
        }
        ///


        //Get video resolution
        List<string> encodingPropertiesToRetrieve = new List<string>();
        encodingPropertiesToRetrieve.Add("System.Video.FrameHeight");
        encodingPropertiesToRetrieve.Add("System.Video.FrameWidth");
        IDictionary<string, object> encodingProperties = await pickedFile.Properties.RetrievePropertiesAsync(encodingPropertiesToRetrieve);
        uint frameHeight = (uint)encodingProperties["System.Video.FrameHeight"];
        uint frameWidth = (uint)encodingProperties["System.Video.FrameWidth"];
        ///


        //Use Windows.Media.Editing to get ImageStream
        var clip = await MediaClip.CreateFromFileAsync(pickedFile);
        var composition = new MediaComposition();
        composition.Clips.Add(clip);

        var imageStream = await composition.GetThumbnailAsync(timeOfFrame, (int)frameWidth, (int)frameHeight, VideoFramePrecision.NearestFrame);
        ///


        //generate bitmap 
        var writableBitmap = new WriteableBitmap((int)frameWidth, (int)frameHeight);
        writableBitmap.SetSource(imageStream);


        //generate some random name for file in PicturesLibrary
        var saveAsTarget = await KnownFolders.PicturesLibrary.CreateFileAsync("IMG" + Guid.NewGuid().ToString().Substring(0, 4) + ".jpg");


        //get stream from bitmap
        Stream stream = writableBitmap.PixelBuffer.AsStream();
        byte[] pixels = new byte[(uint)stream.Length];
        await stream.ReadAsync(pixels, 0, pixels.Length);

        using (var writeStream = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
        {
            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, writeStream);
            encoder.SetPixelData(
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Premultiplied,
                (uint)writableBitmap.PixelWidth,
                (uint)writableBitmap.PixelHeight,
                96,
                96,
                pixels);
            await encoder.FlushAsync();

            using (var outputStream = writeStream.GetOutputStreamAt(0))
            {
                await outputStream.FlushAsync();
            }
        }
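
As for the "get the video resolution from the camera" change mentioned above: if the frames come straight from a running MediaCapture rather than a picked file, the width and height could be read from the capture device instead of from the file's properties. A minimal sketch, assuming an initialized MediaCapture named capture (this part is mine, not from the original answer):

        // replaces the System.Video.FrameWidth / FrameHeight lookup when the source is the camera itself
        var recordProps = (VideoEncodingProperties)capture.VideoDeviceController
            .GetMediaStreamProperties(MediaStreamType.VideoRecord);

        uint frameWidth = recordProps.Width;
        uint frameHeight = recordProps.Height;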

If you want to display the frame in a XAML Image, you should use imageStream:

BitmapImage bitmapImage = new BitmapImage();
bitmapImage.SetSource(imageStream);

XAMLImage.Source = bitmapImage;

If you want to extract more frames, there is also composition.GetThumbnailsAsync (sketched below).
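
A minimal sketch of that variant, assuming the same clip, composition, frameWidth and frameHeight as in the snippet above, and taking one thumbnail per second of the clip (the loop and variable names are my own, not from the original answer):

// build a list of timestamps, one per second of the clip
var times = new List<TimeSpan>();
for (int s = 0; s < (int)clip.OriginalDuration.TotalSeconds; s++)
{
    times.Add(TimeSpan.FromSeconds(s));
}

// returns one image stream per requested timestamp
var thumbnails = await composition.GetThumbnailsAsync(
    times, (int)frameWidth, (int)frameHeight, VideoFramePrecision.NearestFrame);

// each entry can be used exactly like the single imageStream above,
// e.g. displayed via a BitmapImage or saved via a BitmapEncoder
foreach (var thumb in thumbnails)
{
    var bmp = new BitmapImage();
    bmp.SetSource(thumb);
}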

2) Use your mediaCapture when your timer ticks (a sketch of this follows below).
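
One possible way to do that, not from the original answers and dependent on the device supporting photo capture while a video recording is in progress, is to grab a still frame from the running MediaCapture on each tick. A rough sketch, assuming the Library.capture instance from the question and a timer_Tick handler changed to async void:

// inside timer_Tick: capture one JPEG frame per tick while recording continues
// NOTE: only works if the camera supports taking photos while video capture is active
var frameStream = new InMemoryRandomAccessStream();
await Library.capture.CapturePhotoToStreamAsync(
    ImageEncodingProperties.CreateJpeg(), frameStream);

// the stream now holds a JPEG; it can be shown in a XAML Image straight away
frameStream.Seek(0);
var frameBitmap = new BitmapImage();
frameBitmap.SetSource(frameStream);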

Use ffmpeg by installing Accord.Video.FFMPEG:

// needs Accord.Video.FFMPEG (VideoFileReader) and System.Drawing (Bitmap)
using (var vFReader = new VideoFileReader())
{
    vFReader.Open("video.mp4");
    for (int i = 0; i < vFReader.FrameCount; i++)
    {
        // each frame comes back as a System.Drawing.Bitmap; process or save it here, then dispose it
        using (Bitmap bmpBaseOriginal = vFReader.ReadVideoFrame())
        {
            // e.g. bmpBaseOriginal.Save($"frame-{i}.png");
        }
    }
    vFReader.Close();
}

Another way to get it:

I used FFMpegCore, the official docker image, and .NET Core 3.1 + Ubuntu (see the list of available images).

Dockerfile:

FROM mcr.microsoft.com/dotnet/runtime:3.1-bionic
RUN apt-get update && apt-get install -y ffmpeg libgdiplus

COPY bin/Release/netcoreapp3.1/publish/ App/
WORKDIR /App
ENTRYPOINT ["dotnet", "YouConsoleAppNameHere.dll"]

Short version of the code:

GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location

string filePath = AppContext.BaseDirectory + "sample.mp4";    
FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();    

Extended version (with some console logging):

using FFMpegCore;
using FFMpegCore.Enums;
...
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "/usr/bin", TemporaryFilesFolder = "/tmp" }); //configuring ffmpeg location

string filePath = AppContext.BaseDirectory + "sample.mp4";
Console.WriteLine(filePath) ;
Console.WriteLine(File.Exists(filePath));


var mediaInfo = FFProbe.Analyse(filePath);
Console.WriteLine("mp4 duration : " + mediaInfo.Duration);

Directory.CreateDirectory("tmp");
Directory.CreateDirectory("tmp/Video");
Console.WriteLine("started " + DateTime.Now.ToLongTimeString());

FFMpegArguments.FromFileInput(filePath).OutputToFile("tmp/Video/Frame%05d.png", true, Options => { Options.WithVideoCodec(VideoCodec.Png); }).ProcessSynchronously();
Console.WriteLine("processed " + DateTime.Now.ToLongTimeString());

Console.WriteLine(string.Join(", ", Directory.EnumerateFiles("tmp/Video/")));

Result: the PNG files are extracted into the tmp/Video folder. Of course, you can do the same without docker if needed (a sketch of the non-docker configuration follows below).
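
For the docker-free case the only real difference is where FFMpegCore is told to find the binaries. A sketch assuming a local ffmpeg install; the C:\ffmpeg\bin path and the frames output folder are placeholders, not from the original answer:

// point FFMpegCore at a local ffmpeg/ffprobe install instead of the docker image's /usr/bin
GlobalFFOptions.Configure(new FFOptions
{
    BinaryFolder = @"C:\ffmpeg\bin",          // placeholder: wherever ffmpeg.exe and ffprobe.exe actually live
    TemporaryFilesFolder = Path.GetTempPath()
});

Directory.CreateDirectory("frames");          // ffmpeg does not create the output folder itself
FFMpegArguments
    .FromFileInput("sample.mp4")
    .OutputToFile("frames/Frame%05d.png", true, options => options.WithVideoCodec(VideoCodec.Png))
    .ProcessSynchronously();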
