In the previous article, WinUI3 FFmpeg.autogen解析视频帧,使用win2d显示内容 - 吃饭/睡觉 - 博客园 (cnblogs.com), the whole video-decoding pipeline was already implemented. Now we add everything a player needs: play, pause, stop, and a progress (seek) bar.
Demo screenshots
I. Seeking the video to a specified time
Play, pause, and stop only toggle the playback state, so they are simple enough that I will not expand on them here. Seeking to a given time, however, is slightly more involved, so that is the part this article walks through.
1. ffmpeg.av_seek_frame() parameters
1) s: the AVFormatContext handle of the media file
2) stream_index: int, the index of the stream to operate on
3) timestamp: long, the timestamp to seek to (expressed in that stream's time base when stream_index is given)
4) flags: the seek mode, mainly one of the following:
AVSEEK_FLAG_BACKWARD: seek to the nearest keyframe before the timestamp
AVSEEK_FLAG_BYTE: seek by byte position
AVSEEK_FLAG_ANY: seek to any frame, not necessarily a keyframe
AVSEEK_FLAG_FRAME: seek by frame number
Parameter description adapted from: FFMPEG av_seek_frame - 知乎 (zhihu.com)
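To make the parameters concrete, here is a minimal, hedged sketch of a seek call. The names format, videoStream and videoStreamIndex are the fields used throughout this article, and the 30-second target is only an example; the full SeekProgress() implementation follows below.

// Convert a position in seconds into the video stream's time_base units,
// then ask FFmpeg to seek to the nearest keyframe before that position.
double seconds = 30;                                                 // example target
long ts = (long)(seconds / ffmpeg.av_q2d(videoStream->time_base));   // seconds -> stream timestamp
int ret = ffmpeg.av_seek_frame(format, videoStreamIndex, ts, ffmpeg.AVSEEK_FLAG_BACKWARD);
if (ret < 0)
    Debug.WriteLine("av_seek_frame failed");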
2. SeekProgress() sets the playback position. The parameter is a number of seconds, and the video jumps to that time.

public void SeekProgress(int seekTime)
{
    if (format == null || videoStream == null)
        return;
    lock (SyncLock)
    {
        IsPlaying = false; // pause playback while we seek
        clock.Stop();
        // convert the seconds into a timestamp expressed in the video stream's time base
        var timestamp = seekTime / ffmpeg.av_q2d(videoStream->time_base);
        // position the video stream of the container at that timestamp, using the given seek mode
        ffmpeg.av_seek_frame(format, videoStreamIndex, (long)timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD | ffmpeg.AVSEEK_FLAG_FRAME);
        ffmpeg.av_frame_unref(frame);   // drop the previous frame
        ffmpeg.av_packet_unref(packet); // drop the previous packet
        int error = 0;
        // keep decoding; once the decoded timestamp reaches the requested one we have arrived, so leave the loop
        while (packet->pts < timestamp)
        {
            do
            {
                do
                {
                    ffmpeg.av_packet_unref(packet);               // drop the previous packet
                    error = ffmpeg.av_read_frame(format, packet); // read the next packet
                    if (error == ffmpeg.AVERROR_EOF)              // reached the end of the file
                        return;
                } while (packet->stream_index != videoStreamIndex); // skip packets that do not belong to the video stream
                ffmpeg.avcodec_send_packet(codecContext, packet);          // send the packet to the decoder
                error = ffmpeg.avcodec_receive_frame(codecContext, frame); // receive the decoded frame
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
        }
        OffsetClock = TimeSpan.FromSeconds(seekTime); // record the time offset
        clock.Restart();                              // restart the clock
        IsPlaying = true;                             // resume playback
        lastTime = TimeSpan.Zero;
    }
}
Even though the code calls ffmpeg.av_seek_frame() with the target timestamp, the call does not land exactly where we asked: it jumps to the nearest keyframe before the requested position. That is why the while loop keeps reading and decoding frames, discarding every frame that comes before the target time, and only stops once the requested timestamp has been reached.
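One detail worth adding as a side note, and not part of the original code above: after a seek the decoder may still hold frames buffered from the previous position, so av_seek_frame() is commonly paired with avcodec_flush_buffers() before the catch-up loop. A minimal sketch of where such a call would go:

// Assumption: not in the original SeekProgress(); shown only as a common companion to av_seek_frame().
ffmpeg.av_seek_frame(format, videoStreamIndex, (long)timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD);
ffmpeg.avcodec_flush_buffers(codecContext); // discard frames the decoder buffered before the seek
// ...then run the read/decode loop shown above until packet->pts reaches the target timestamp.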
II. DecodecVideo
I have wrapped the whole decoding flow from the previous article, together with the seek code above, into a DecodecVideo class, and added Play(), Pause(), Stop() and SeekProgress() methods so that playback is easier to control.

public unsafe class DecodecVideo : IMedia
{
    // media format context (the media container)
    AVFormatContext* format;
    // codec context
    AVCodecContext* codecContext;
    // media packet
    AVPacket* packet;
    // media frame
    AVFrame* frame;
    // image converter
    SwsContext* convert;
    // video stream
    AVStream* videoStream;
    // index of the video stream inside the container
    int videoStreamIndex;
    TimeSpan OffsetClock;
    // frame buffer pointer
    IntPtr FrameBufferPtr;
    byte_ptrArray4 TargetData;
    int_array4 TargetLinesize;
    object SyncLock = new object();
    // playback clock
    Stopwatch clock = new Stopwatch();
    // time the previous frame was shown
    TimeSpan lastTime;
    bool isNextFrame = true;

    public event MediaHandler MediaCompleted;
    public event MediaHandler MediaPlay;
    public event MediaHandler MediaPause;

    #region
    // video duration
    public TimeSpan Duration { get; protected set; }
    // codec name
    public string CodecName { get; protected set; }
    public string CodecId { get; protected set; }
    // bitrate
    public int Bitrate { get; protected set; }
    // frame rate
    public double FrameRate { get; protected set; }
    // frame width and height
    public int FrameWidth { get; protected set; }
    public int FrameHeight { get; protected set; }
    // whether playback is currently running
    public bool IsPlaying { get; protected set; }
    public MediaState State { get; protected set; }
    public TimeSpan Position { get => clock.Elapsed + OffsetClock; }
    // how long a single frame is displayed
    public TimeSpan frameDuration { get; private set; }
    #endregion

    /// <summary>
    /// Initialize video decoding
    /// </summary>
    /// <param name="path"></param>
    public void InitDecodecVideo(string path)
    {
        int error = 0;
        // create a media format context
        format = ffmpeg.avformat_alloc_context();
        if (format == null)
        {
            Debug.WriteLine("Failed to create the media format context (container)");
            return;
        }
        var tempFormat = format;
        // open the video
        error = ffmpeg.avformat_open_input(&tempFormat, path, null, null);
        if (error < 0)
        {
            Debug.WriteLine("Failed to open the video");
            return;
        }
        // read the stream information
        ffmpeg.avformat_find_stream_info(format, null);
        // codec
        AVCodec* codec = null;
        // find the index of the video stream
        videoStreamIndex = ffmpeg.av_find_best_stream(format, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
        if (videoStreamIndex < 0)
        {
            Debug.WriteLine("No video stream found");
            return;
        }
        // get the video stream from its index
        videoStream = format->streams[videoStreamIndex];
        // create the decoder context
        codecContext = ffmpeg.avcodec_alloc_context3(codec);
        // copy the decoder parameters from the video stream into the decoder context
        error = ffmpeg.avcodec_parameters_to_context(codecContext, videoStream->codecpar);
        if (error < 0)
        {
            Debug.WriteLine("Failed to set the decoder parameters");
            return;
        }
        // open the decoder
        error = ffmpeg.avcodec_open2(codecContext, codec, null);
        if (error < 0)
        {
            Debug.WriteLine("Failed to open the decoder");
            return;
        }
        // duration and the other video information
        //Duration = TimeSpan.FromMilliseconds(videoStream->duration / ffmpeg.av_q2d(videoStream->time_base));
        Duration = TimeSpan.FromMilliseconds(format->duration / 1000);
        CodecId = videoStream->codecpar->codec_id.ToString();
        CodecName = ffmpeg.avcodec_get_name(videoStream->codecpar->codec_id);
        Bitrate = (int)videoStream->codecpar->bit_rate;
        FrameRate = ffmpeg.av_q2d(videoStream->r_frame_rate);
        FrameWidth = videoStream->codecpar->width;
        FrameHeight = videoStream->codecpar->height;
        frameDuration = TimeSpan.FromMilliseconds(1000 / FrameRate);
        // initialize the converter, turning frames from the source pixel format into BGR0 (8:8:8)
        var result = InitConvert(FrameWidth, FrameHeight, codecContext->pix_fmt, FrameWidth, FrameHeight, AVPixelFormat.AV_PIX_FMT_BGR0);
        // if everything initialized successfully, allocate the packet and frame
        if (result)
        {
            // allocate memory for packet and frame
            packet = ffmpeg.av_packet_alloc();
            frame = ffmpeg.av_frame_alloc();
        }
    }

    /// <summary>
    /// Initialize the converter
    /// </summary>
    /// <param name="sourceWidth">source width</param>
    /// <param name="sourceHeight">source height</param>
    /// <param name="sourceFormat">source pixel format</param>
    /// <param name="targetWidth">target width</param>
    /// <param name="targetHeight">target height</param>
    /// <param name="targetFormat">target pixel format</param>
    bool InitConvert(int sourceWidth, int sourceHeight, AVPixelFormat sourceFormat, int targetWidth, int targetHeight, AVPixelFormat targetFormat)
    {
        // build the converter from the input and output parameters
        convert = ffmpeg.sws_getContext(sourceWidth, sourceHeight, sourceFormat,
            targetWidth, targetHeight, targetFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
        if (convert == null)
        {
            Debug.WriteLine("Failed to create the converter");
            return false;
        }
        // size of the buffer needed for the converted image
        var bufferSize = ffmpeg.av_image_get_buffer_size(targetFormat, targetWidth, targetHeight, 1);
        // allocate the buffer
        FrameBufferPtr = Marshal.AllocHGlobal(bufferSize);
        TargetData = new byte_ptrArray4();
        TargetLinesize = new int_array4();
        ffmpeg.av_image_fill_arrays(ref TargetData, ref TargetLinesize, (byte*)FrameBufferPtr, targetFormat, targetWidth, targetHeight, 1);
        return true;
    }

    public byte[] FrameConvertBytes(AVFrame* sourceFrame)
    {
        // use the converter to turn the YUV frame into the target format
        ffmpeg.sws_scale(convert, sourceFrame->data, sourceFrame->linesize, 0, sourceFrame->height, TargetData, TargetLinesize);
        var data = new byte_ptrArray8();
        data.UpdateFrom(TargetData);
        var linesize = new int_array8();
        linesize.UpdateFrom(TargetLinesize);
        // copy the converted data out of unmanaged memory into a byte array
        byte[] bytes = new byte[FrameWidth * FrameHeight * 4];
        Marshal.Copy((IntPtr)data[0], bytes, 0, bytes.Length);
        return bytes;
    }

    public bool TryReadNextFrame(out AVFrame outFrame)
    {
        if (lastTime == TimeSpan.Zero)
        {
            lastTime = Position;
            isNextFrame = true;
        }
        else
        {
            if (Position - lastTime >= frameDuration)
            {
                lastTime = Position;
                isNextFrame = true;
            }
            else
            {
                outFrame = *frame;
                return false;
            }
        }
        if (isNextFrame)
        {
            lock (SyncLock)
            {
                int result = -1;
                // clear the previous frame
                ffmpeg.av_frame_unref(frame);
                while (true)
                {
                    // clear the previous packet
                    ffmpeg.av_packet_unref(packet);
                    // read the next packet; the return value reports the read status
                    result = ffmpeg.av_read_frame(format, packet);
                    // the last frame has been read and there is no more data, stop reading
                    if (result == ffmpeg.AVERROR_EOF || result < 0)
                    {
                        outFrame = *frame;
                        StopPlay();
                        return false;
                    }
                    // keep reading until we get a packet that belongs to the video stream
                    if (packet->stream_index != videoStreamIndex)
                        continue;
                    // send the packet to the decoder
                    ffmpeg.avcodec_send_packet(codecContext, packet);
                    // receive the decoded frame from the decoder
                    result = ffmpeg.avcodec_receive_frame(codecContext, frame);
                    if (result < 0)
                        continue;
                    outFrame = *frame;
                    return true;
                }
            }
        }
        else
        {
            outFrame = *frame;
            return false;
        }
    }

    void StopPlay()
    {
        lock (SyncLock)
        {
            if (State == MediaState.None)
                return;
            IsPlaying = false;
            OffsetClock = TimeSpan.FromSeconds(0);
            clock.Reset();
            clock.Stop();
            var tempFormat = format;
            ffmpeg.avformat_free_context(tempFormat);
            format = null;
            var tempCodecContext = codecContext;
            ffmpeg.avcodec_free_context(&tempCodecContext);
            var tempPacket = packet;
            ffmpeg.av_packet_free(&tempPacket);
            var tempFrame = frame;
            ffmpeg.av_frame_free(&tempFrame);
            var tempConvert = convert;
            ffmpeg.sws_freeContext(convert);
            videoStream = null;
            videoStreamIndex = -1;
            // duration
            Duration = TimeSpan.FromMilliseconds(0);
            // codec name
            CodecName = String.Empty;
            CodecId = String.Empty;
            // bitrate
            Bitrate = 0;
            // frame rate
            FrameRate = 0;
            // frame width and height
            FrameWidth = 0;
            FrameHeight = 0;
            State = MediaState.None;
            Marshal.FreeHGlobal(FrameBufferPtr);
            lastTime = TimeSpan.Zero;
            MediaCompleted?.Invoke(Duration);
        }
    }

    /// <summary>
    /// Change the playback position
    /// </summary>
    /// <param name="seekTime">target position in seconds</param>
    public void SeekProgress(int seekTime)
    {
        if (format == null || videoStream == null)
            return;
        lock (SyncLock)
        {
            IsPlaying = false; // pause playback while we seek
            clock.Stop();
            // convert the seconds into a timestamp expressed in the video stream's time base
            var timestamp = seekTime / ffmpeg.av_q2d(videoStream->time_base);
            // position the video stream of the container at that timestamp, using the given seek mode
            ffmpeg.av_seek_frame(format, videoStreamIndex, (long)timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD | ffmpeg.AVSEEK_FLAG_FRAME);
            ffmpeg.av_frame_unref(frame);   // drop the previous frame
            ffmpeg.av_packet_unref(packet); // drop the previous packet
            int error = 0;
            // keep decoding; once the decoded timestamp reaches the requested one we have arrived, so leave the loop
            while (packet->pts < timestamp)
            {
                do
                {
                    do
                    {
                        ffmpeg.av_packet_unref(packet);               // drop the previous packet
                        error = ffmpeg.av_read_frame(format, packet); // read the next packet
                        if (error == ffmpeg.AVERROR_EOF)              // reached the end of the file
                            return;
                    } while (packet->stream_index != videoStreamIndex); // skip packets that do not belong to the video stream
                    ffmpeg.avcodec_send_packet(codecContext, packet);          // send the packet to the decoder
                    error = ffmpeg.avcodec_receive_frame(codecContext, frame); // receive the decoded frame
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
            }
            OffsetClock = TimeSpan.FromSeconds(seekTime); // record the time offset
            clock.Restart();                              // restart the clock
            IsPlaying = true;                             // resume playback
            lastTime = TimeSpan.Zero;
        }
    }

    public void Play()
    {
        if (State == MediaState.Play)
            return;
        clock.Start();
        IsPlaying = true;
        State = MediaState.Play;
    }

    public void Pause()
    {
        if (State != MediaState.Play)
            return;
        IsPlaying = false;
        OffsetClock = clock.Elapsed;
        clock.Stop();
        clock.Reset();
        State = MediaState.Pause;
    }

    public void Stop()
    {
        if (State == MediaState.None)
            return;
        StopPlay();
    }
}
public enum MediaState
{
    // not playing
    None,
    Read,
    Play,
    Pause,
}

public interface IMedia
{
    public delegate void MediaHandler(TimeSpan duration);
    public event MediaHandler MediaCompleted;
}
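To make the responsibilities of the class concrete before wiring it to the UI, here is a minimal, hypothetical usage sketch; the file path is a placeholder and the real WinUI 3 integration follows in section IV.

// Hypothetical driver for DecodecVideo (illustration only; the path is a placeholder).
var video = new DecodecVideo();
video.InitDecodecVideo(@"C:\videos\sample.mp4");
video.MediaCompleted += duration =>
    Debug.WriteLine($"Playback finished, duration: {duration}");

video.Play();              // starts the clock, State becomes MediaState.Play
video.SeekProgress(30);    // jump to the 30-second mark
video.Pause();             // freezes the clock and keeps the current offset
video.Stop();              // releases the FFmpeg resources via StopPlay()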
III. The UI

<Grid>
    <Grid.Resources>
        <!-- TextBlock styles "Key" and "Value" used by the info panel are defined here -->
    </Grid.Resources>
    <Grid.RowDefinitions>
        <RowDefinition></RowDefinition>
        <RowDefinition Height="auto"></RowDefinition>
    </Grid.RowDefinitions>
    <Grid.ColumnDefinitions>
        <ColumnDefinition></ColumnDefinition>
        <ColumnDefinition Width="auto"></ColumnDefinition>
    </Grid.ColumnDefinitions>
    <canvas:CanvasControl x:Name="canvas"></canvas:CanvasControl>
    <StackPanel Background="Black" Grid.Column="1" Width="200">
        <control:DockPanel>
            <TextBlock Text="Duration" Style="{StaticResource Key}"></TextBlock>
            <TextBlock x:Name="dura" Text="00:00:00" Style="{StaticResource Value}"></TextBlock>
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Text="Position" Style="{StaticResource Key}"></TextBlock>
            <TextBlock x:Name="position" Text="00:00:00" Style="{StaticResource Value}"></TextBlock>
        </control:DockPanel>
        <control:DockPanel Background="LightBlue">
            <TextBlock Style="{StaticResource Key}">Has Video</TextBlock>
            <TextBlock Style="{StaticResource Value}" />
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Style="{StaticResource Key}" Text="Video Codec"></TextBlock>
            <TextBlock Style="{StaticResource Value}" x:Name="videoCodec" />
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Style="{StaticResource Key}" Text="Video Bitrate"></TextBlock>
            <TextBlock Style="{StaticResource Value}" x:Name="videoBitrate" />
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Style="{StaticResource Key}" Text="Video Width"></TextBlock>
            <TextBlock Style="{StaticResource Value}" x:Name="videoWidth"/>
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Style="{StaticResource Key}" Text="Video Height"></TextBlock>
            <TextBlock Style="{StaticResource Value}" x:Name="videoHeight" />
        </control:DockPanel>
        <control:DockPanel>
            <TextBlock Style="{StaticResource Key}" Text="Video FPS"></TextBlock>
            <TextBlock Style="{StaticResource Value}" x:Name="videoFps" />
        </control:DockPanel>
    </StackPanel>
    <StackPanel Grid.Row="1" Grid.ColumnSpan="2">
        <Grid>
            <Grid.ColumnDefinitions>
                <ColumnDefinition Width="auto"></ColumnDefinition>
                <ColumnDefinition></ColumnDefinition>
                <ColumnDefinition Width="auto"></ColumnDefinition>
            </Grid.ColumnDefinitions>
            <TextBlock Text="{Binding ElementName=position,Path=Text,Mode=OneWay}"></TextBlock>
            <Slider Grid.Column="1" x:Name="progress"></Slider>
            <TextBlock Grid.Column="2" Text="00:00:00" x:Name="duration"></TextBlock>
        </Grid>
        <TextBox x:Name="pathBox" Text="C:\Users\ludin\Desktop\新建文件夹 (4)\1.mp4" PlaceholderText="Enter a file path"></TextBox>
        <StackPanel Grid.Row="1" Orientation="Horizontal" HorizontalAlignment="Center" VerticalAlignment="Center">
            <Button x:Name="play">Play</Button>
            <Button x:Name="pause">Pause</Button>
            <Button x:Name="stop">Stop</Button>
        </StackPanel>
    </StackPanel>
</Grid>
A CanvasControl sits in the middle of the page and is used to draw the video frames; the text controls on the right show the video's information, and the buttons and progress slider along the bottom control playback.
IV. Code-behind

public unsafe sealed partial class FFmpegDecodecVideo : Page
{
    Task PlayTask;
    CanvasBitmap bitmap;
    DispatcherTimer timer = new DispatcherTimer();
    bool progressActivity = false;
    DecodecVideo video = new DecodecVideo();

    public FFmpegDecodecVideo()
    {
        this.InitializeComponent();
        Init();
        InitUi();
    }

    void Init()
    {
        // play
        play.Click += (s, e) =>
        {
            if (video.State == MediaState.None)
            {
                // initialize video decoding
                video.InitDecodecVideo(pathBox.Text);
                DisplayVideoInfo();
            }
            video.Play();
            timer.Start();
        };
        // pause
        pause.Click += (s, e) => video.Pause();
        // stop
        stop.Click += (s, e) => video.Stop();
        PlayTask = new Task(() =>
        {
            while (true)
            {
                // while playing
                if (video.IsPlaying)
                {
                    // try to get the next video frame
                    if (video.TryReadNextFrame(out var frame))
                    {
                        var bytes = video.FrameConvertBytes(&frame);
                        bitmap = CanvasBitmap.CreateFromBytes(CanvasDevice.GetSharedDevice(), bytes, video.FrameWidth, video.FrameHeight, DirectXPixelFormat.B8G8R8A8UIntNormalized);
                        canvas.Invalidate();
                    }
                }
            }
        });
        PlayTask.Start();
        video.MediaCompleted += (s) =>
        {
            DispatcherQueue.TryEnqueue(Microsoft.UI.Dispatching.DispatcherQueuePriority.Normal, () =>
            {
                timer.Stop();
                progressActivity = false;
                DisplayVideoInfo();
            });
        };
    }

    void InitUi()
    {
        // draw the canvas
        canvas.Draw += (s, e) =>
        {
            if (bitmap != null)
            {
                var te = Win2DUlit.CalcutateImageCenteredTransform(canvas.ActualSize, bitmap.Size);
                te.Source = bitmap;
                e.DrawingSession.DrawImage(te);
            }
        };
        timer.Interval = TimeSpan.FromMilliseconds(300);
        // the timer updates the progress bar
        timer.Tick += (s, e) =>
        {
            if (!video.IsPlaying)
                return;
            position.Text = video.Position.ToString();
            progressActivity = false;
            progress.Value = video.Position.TotalSeconds;
            progressActivity = true;
        };
        // the progress bar was changed by the user
        progress.ValueChanged += (s, e) =>
        {
            if (!video.IsPlaying)
                return;
            if (progressActivity == true)
            {
                video.SeekProgress((int)e.NewValue);
            }
        };
    }

    /// <summary>
    /// Show the video information
    /// </summary>
    void DisplayVideoInfo()
    {
        dura.Text = video.Duration.ToString();
        videoCodec.Text = video.CodecName;
        videoBitrate.Text = video.Bitrate.ToString();
        videoWidth.Text = video.FrameWidth.ToString();
        videoHeight.Text = video.FrameHeight.ToString();
        videoFps.Text = video.FrameRate.ToString();
        duration.Text = video.Duration.ToString();
        position.Text = video.Position.ToString();
        progress.Maximum = video.Duration.TotalSeconds;
    }
}
V. Conclusion
At this point we have a basic, working video player with play, pause, stop, and seek. In the next article I will show how to decode audio with FFmpeg and play it with NAudio.
Demo project: LearnFFmppeg (learning and documenting ffmpeg) - Gitee.com