diff --git a/Samples/MediaPlayerCPP/MainPage.xaml b/Samples/MediaPlayerCPP/MainPage.xaml
index f7a5121681..a84385531d 100644
--- a/Samples/MediaPlayerCPP/MainPage.xaml
+++ b/Samples/MediaPlayerCPP/MainPage.xaml
@@ -60,6 +60,10 @@
+
+
+
+
diff --git a/Samples/MediaPlayerCPP/MainPage.xaml.cpp b/Samples/MediaPlayerCPP/MainPage.xaml.cpp
index dbae09153c..c823288147 100644
--- a/Samples/MediaPlayerCPP/MainPage.xaml.cpp
+++ b/Samples/MediaPlayerCPP/MainPage.xaml.cpp
@@ -97,6 +97,20 @@ task MainPage::TryOpenLastFile()
}
}
+task MainPage::TryOpenLastUri()
+{
+ try
+ {
+ // Try to open the last URI
+ auto uri = (String^)ApplicationData::Current->LocalSettings->Values->Lookup("LastUri");
+ co_await OpenUriStream(uri);
+ }
+ catch (Exception^ ex)
+ {
+ DisplayErrorMessage(ex->Message);
+ }
+}
+
void MainPage::OpenLocalFile(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
OpenLocalFile();
@@ -133,16 +147,15 @@ task MainPage::OpenLocalFile(StorageFile^ file)
// Open StorageFile as IRandomAccessStream to be passed to FFmpegMediaSource
try
{
+ StorageApplicationPermissions::FutureAccessList->Clear();
+ StorageApplicationPermissions::FutureAccessList->Add(file);
+
auto stream = co_await file->OpenAsync(FileAccessMode::Read);
// Instantiate FFmpegMediaSource using the opened local file stream
FFmpegMSS = co_await FFmpegMediaSource::CreateFromStreamAsync(stream, Config);
- StorageApplicationPermissions::FutureAccessList->Clear();
- StorageApplicationPermissions::FutureAccessList->Add(file);
-
playbackItem = FFmpegMSS->CreateMediaPlaybackItem();
-
// Pass MediaPlaybackItem to Media Element
mediaPlayer->Source = playbackItem;
@@ -181,8 +194,7 @@ task MainPage::OpenUriStream(Platform::String^ uri)
// https://www.ffmpeg.org/ffmpeg-formats.html
//
// If format cannot be detected, try to increase probesize, max_probe_packets and analyzeduration!
-
- // Below are some sample options that you can set to configure RTSP streaming
+
//Config->FFmpegOptions->Insert("rtsp_flags", "prefer_tcp");
Config->FFmpegOptions->Insert("stimeout", 1000000);
Config->FFmpegOptions->Insert("timeout", 1000000);
@@ -191,6 +203,8 @@ task MainPage::OpenUriStream(Platform::String^ uri)
mediaPlayer->Source = nullptr;
try
{
+ ApplicationData::Current->LocalSettings->Values->Insert("LastUri", uri);
+
FFmpegMSS = co_await FFmpegMediaSource::CreateFromUriAsync(uri, Config);
playbackItem = FFmpegMSS->CreateMediaPlaybackItem();
@@ -500,14 +514,38 @@ void MediaPlayerCPP::MainPage::OnKeyDown(Windows::UI::Core::CoreWindow^ sender,
TryOpenLastFile();
}
+ if (args->VirtualKey == Windows::System::VirtualKey::Enter && (Window::Current->CoreWindow->GetKeyState(Windows::System::VirtualKey::Shift) & Windows::UI::Core::CoreVirtualKeyStates::Down)
+ == Windows::UI::Core::CoreVirtualKeyStates::Down && ApplicationData::Current->LocalSettings->Values->HasKey("LastUri"))
+ {
+ TryOpenLastUri();
+ }
+
+ if (args->Handled)
+ {
+ return;
+ }
+
if (args->VirtualKey == Windows::System::VirtualKey::V)
{
if (playbackItem && playbackItem->VideoTracks->Size > 1)
{
- playbackItem->VideoTracks->SelectedIndex =
+ bool reverse = (Window::Current->CoreWindow->GetKeyState(Windows::System::VirtualKey::Shift) & Windows::UI::Core::CoreVirtualKeyStates::Down) == Windows::UI::Core::CoreVirtualKeyStates::Down;
+ int index = reverse ?
+ (playbackItem->VideoTracks->SelectedIndex + playbackItem->VideoTracks->Size - 1) % playbackItem->VideoTracks->Size :
(playbackItem->VideoTracks->SelectedIndex + 1) % playbackItem->VideoTracks->Size;
+ playbackItem->VideoTracks->SelectedIndex = index;
}
}
+
+ if (args->VirtualKey == Windows::System::VirtualKey::Right && FFmpegMSS && mediaPlayer->PlaybackSession->CanSeek)
+ {
+ mediaPlayer->PlaybackSession->Position = TimeSpan{ mediaPlayer->PlaybackSession->Position.Duration + 50000000 };
+ }
+
+ if (args->VirtualKey == Windows::System::VirtualKey::Left && FFmpegMSS && mediaPlayer->PlaybackSession->CanSeek)
+ {
+ mediaPlayer->PlaybackSession->Position = TimeSpan{ mediaPlayer->PlaybackSession->Position.Duration - 50000000 };
+ }
}
@@ -560,3 +598,13 @@ void MediaPlayerCPP::MainPage::ffmpegAudioFilters_KeyDown(Platform::Object^ send
}
}
}
+
+double MediaPlayerCPP::MainPage::GetBufferSizeMB()
+{
+ return (double)Config->ReadAheadBufferSize / (1024*1024);
+}
+
+void MediaPlayerCPP::MainPage::SetBufferSizeMB(double value)
+{
+ Config->ReadAheadBufferSize = (long long)(value * (1024 * 1024));
+}
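
Note on the arrow-key seek handlers added above: Windows.Foundation.TimeSpan counts 100-nanosecond ticks, so the hard-coded 50000000 is the same 5-second step that the C# samples express as TimeSpan.FromSeconds(5). A minimal C# check of that equivalence, illustrative only and not part of the patch:

    using System;
    using System.Diagnostics;

    class SeekStepCheck
    {
        // A TimeSpan tick is 100 ns, so 5 s = 5 * 10,000,000 ticks.
        const long FiveSecondsInTicks = 50_000_000;

        static void Main()
        {
            Debug.Assert(TimeSpan.FromSeconds(5).Ticks == FiveSecondsInTicks);
            Console.WriteLine(TimeSpan.FromSeconds(5).Ticks); // prints 50000000
        }
    }
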
diff --git a/Samples/MediaPlayerCPP/MainPage.xaml.h b/Samples/MediaPlayerCPP/MainPage.xaml.h
index cce545cff6..98831356fc 100644
--- a/Samples/MediaPlayerCPP/MainPage.xaml.h
+++ b/Samples/MediaPlayerCPP/MainPage.xaml.h
@@ -1,4 +1,4 @@
-//*****************************************************************************
+//*****************************************************************************
//
// Copyright 2015 Microsoft Corporation
//
@@ -39,11 +39,15 @@ namespace MediaPlayerCPP
property FFmpegInteropX::MediaSourceConfig^ Config;
property FFmpegInteropX::VideoEffectConfiguration^ VideoEffectConfiguration;
+ double GetBufferSizeMB();
+ void SetBufferSizeMB(double value);
+
private:
void OpenLocalFile(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
task OpenLocalFile();
task OpenLocalFile(Windows::Storage::StorageFile^ file);
task TryOpenLastFile();
+ task TryOpenLastUri();
void URIBoxKeyUp(Platform::Object^ sender, Windows::UI::Xaml::Input::KeyRoutedEventArgs^ e);
task OpenUriStream(Platform::String^ uri);
void MediaFailed(Platform::Object^ sender, Windows::UI::Xaml::ExceptionRoutedEventArgs^ e);
@@ -81,5 +85,6 @@ namespace MediaPlayerCPP
void ffmpegAudioFilters_KeyDown(Platform::Object^ sender, Windows::UI::Xaml::Input::KeyRoutedEventArgs^ e);
void OnMediaOpened(Windows::Media::Playback::MediaPlayer^ sender, Platform::Object^ args);
void OnMediaFailed(Windows::Media::Playback::MediaPlayer^ sender, Windows::Media::Playback::MediaPlayerFailedEventArgs^ args);
+
};
}
diff --git a/Samples/MediaPlayerCS/MainPage.xaml b/Samples/MediaPlayerCS/MainPage.xaml
index 471fc21dc2..ad865237e2 100644
--- a/Samples/MediaPlayerCS/MainPage.xaml
+++ b/Samples/MediaPlayerCS/MainPage.xaml
@@ -62,6 +62,10 @@
+
+
+
+
diff --git a/Samples/MediaPlayerCS/MainPage.xaml.cs b/Samples/MediaPlayerCS/MainPage.xaml.cs
index 079cbc57eb..5185cdf3cc 100644
--- a/Samples/MediaPlayerCS/MainPage.xaml.cs
+++ b/Samples/MediaPlayerCS/MainPage.xaml.cs
@@ -93,14 +93,38 @@ private async void MainPage_KeyDown(CoreWindow sender, KeyEventArgs args)
{
await TryOpenLastFile();
}
+ if (args.VirtualKey == VirtualKey.Enter && (Window.Current.CoreWindow.GetKeyState(VirtualKey.Shift) & CoreVirtualKeyStates.Down)
+ == CoreVirtualKeyStates.Down && ApplicationData.Current.LocalSettings.Values.ContainsKey("LastUri"))
+ {
+ await TryOpenLastUri();
+ }
+
+ if (args.Handled)
+ {
+ return;
+ }
+
if (args.VirtualKey == VirtualKey.V)
{
if (playbackItem != null && playbackItem.VideoTracks.Count > 1)
{
- playbackItem.VideoTracks.SelectedIndex =
+ bool reverse = (Window.Current.CoreWindow.GetKeyState(VirtualKey.Control) & CoreVirtualKeyStates.Down) == CoreVirtualKeyStates.Down;
+ int index = reverse ?
+ (playbackItem.VideoTracks.SelectedIndex + playbackItem.VideoTracks.Count - 1) % playbackItem.VideoTracks.Count :
(playbackItem.VideoTracks.SelectedIndex + 1) % playbackItem.VideoTracks.Count;
+ playbackItem.VideoTracks.SelectedIndex = index;
}
}
+
+ if (args.VirtualKey == VirtualKey.Right && FFmpegMSS != null && mediaPlayer.PlaybackSession.CanSeek)
+ {
+ mediaPlayer.PlaybackSession.Position += TimeSpan.FromSeconds(5);
+ }
+
+ if (args.VirtualKey == VirtualKey.Left && FFmpegMSS != null && mediaPlayer.PlaybackSession.CanSeek)
+ {
+ mediaPlayer.PlaybackSession.Position -= TimeSpan.FromSeconds(5);
+ }
}
private async void CodecChecker_CodecRequired(object sender, CodecRequiredEventArgs args)
@@ -152,6 +176,18 @@ private async Task TryOpenLastFile()
{
}
}
+ private async Task TryOpenLastUri()
+ {
+ try
+ {
+ // Try to open the last URI
+ var uri = (string)ApplicationData.Current.LocalSettings.Values["LastUri"];
+ await OpenStreamUri(uri);
+ }
+ catch (Exception)
+ {
+ }
+ }
public MediaSourceConfig Config { get; set; }
@@ -227,36 +263,43 @@ private async void URIBoxKeyUp(object sender, KeyRoutedEventArgs e)
// Mark event as handled to prevent duplicate event to re-triggered
e.Handled = true;
- try
- {
- // Set FFmpeg specific options:
- // https://www.ffmpeg.org/ffmpeg-protocols.html
- // https://www.ffmpeg.org/ffmpeg-formats.html
+ await OpenStreamUri(uri);
+ }
+ }
- // If format cannot be detected, try to increase probesize, max_probe_packets and analyzeduration!
+ private async Task OpenStreamUri(string uri)
+ {
+ try
+ {
+ ApplicationData.Current.LocalSettings.Values["LastUri"] = uri;
- // Below are some sample options that you can set to configure RTSP streaming
- // Config.FFmpegOptions.Add("rtsp_flags", "prefer_tcp");
- Config.FFmpegOptions.Add("stimeout", 1000000);
- Config.FFmpegOptions.Add("timeout", 1000000);
+ // Set FFmpeg specific options:
+ // https://www.ffmpeg.org/ffmpeg-protocols.html
+ // https://www.ffmpeg.org/ffmpeg-formats.html
- // Instantiate FFmpegMediaSource using the URI
- mediaPlayer.Source = null;
- FFmpegMSS = await FFmpegMediaSource.CreateFromUriAsync(uri, Config);
+ // If format cannot be detected, try to increase probesize, max_probe_packets and analyzeduration!
- var source = FFmpegMSS.CreateMediaPlaybackItem();
+ // Config.FFmpegOptions.Add("rtsp_flags", "prefer_tcp");
+ Config.FFmpegOptions.Add("stimeout", 1000000);
+ Config.FFmpegOptions.Add("timeout", 1000000);
- // Pass MediaStreamSource to Media Element
- mediaPlayer.Source = source;
+ // Instantiate FFmpegMediaSource using the URI
+ mediaPlayer.Source = null;
+ FFmpegMSS = await FFmpegMediaSource.CreateFromUriAsync(uri, Config);
- // Close control panel after opening media
- Splitter.IsPaneOpen = false;
+ if (AutoCreatePlaybackItem)
+ {
+ CreatePlaybackItemAndStartPlaybackInternal();
}
- catch (Exception ex)
+ else
{
- await DisplayErrorMessage(ex.Message);
+ playbackItem = null;
}
}
+ catch (Exception ex)
+ {
+ await DisplayErrorMessage(ex.Message);
+ }
}
private async void ExtractFrame(object sender, RoutedEventArgs e)
@@ -585,5 +628,15 @@ private void ffmpegAudioFilters_LostFocus(object sender, RoutedEventArgs e)
Config.FFmpegAudioFilters = ffmpegAudioFilters.Text;
FFmpegMSS?.SetFFmpegAudioFilters(ffmpegAudioFilters.Text);
}
+
+ private double GetBufferSizeMB()
+ {
+ return (double)Config.ReadAheadBufferSize / (1024 * 1024);
+ }
+
+ private long SetBufferSizeMB(double value)
+ {
+ return Config.ReadAheadBufferSize = (long)(value * (1024 * 1024));
+ }
}
}
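
The Shift+Enter handler and OpenStreamUri above round-trip the most recent URI through ApplicationData.Current.LocalSettings under the "LastUri" key. A minimal sketch of that persistence pattern in isolation; the LastUriStore class name is illustrative and not part of the sample:

    using Windows.Storage;

    static class LastUriStore
    {
        const string Key = "LastUri"; // same settings key the sample uses

        // Remember the most recently opened URI across app sessions.
        public static void Save(string uri) =>
            ApplicationData.Current.LocalSettings.Values[Key] = uri;

        // Returns null if no URI has been stored yet.
        public static string TryLoad() =>
            ApplicationData.Current.LocalSettings.Values.TryGetValue(Key, out var value)
                ? value as string
                : null;
    }
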
diff --git a/Samples/MediaPlayerWinUI/App.xaml.cs b/Samples/MediaPlayerWinUI/App.xaml.cs
index 57ab3d1d72..2e9be540f3 100644
--- a/Samples/MediaPlayerWinUI/App.xaml.cs
+++ b/Samples/MediaPlayerWinUI/App.xaml.cs
@@ -1,20 +1,4 @@
-using Microsoft.UI.Xaml;
-using Microsoft.UI.Xaml.Controls;
-using Microsoft.UI.Xaml.Controls.Primitives;
-using Microsoft.UI.Xaml.Data;
-using Microsoft.UI.Xaml.Input;
-using Microsoft.UI.Xaml.Media;
-using Microsoft.UI.Xaml.Navigation;
-using Microsoft.UI.Xaml.Shapes;
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Runtime.InteropServices.WindowsRuntime;
-using Windows.ApplicationModel;
-using Windows.ApplicationModel.Activation;
-using Windows.Foundation;
-using Windows.Foundation.Collections;
+using Microsoft.UI.Xaml;
// To learn more about WinUI, the WinUI project structure,
// and more about our project templates, see: http://aka.ms/winui-project-info.
diff --git a/Samples/MediaPlayerWinUI/MainPage.xaml.cs b/Samples/MediaPlayerWinUI/MainPage.xaml.cs
index a57dbeefee..b23d5577fc 100644
--- a/Samples/MediaPlayerWinUI/MainPage.xaml.cs
+++ b/Samples/MediaPlayerWinUI/MainPage.xaml.cs
@@ -1,13 +1,9 @@
using Microsoft.UI.Xaml;
using Microsoft.UI.Xaml.Controls;
-using Microsoft.UI.Xaml.Controls.Primitives;
using Microsoft.UI.Xaml.Data;
using Microsoft.UI.Xaml.Input;
-using Microsoft.UI.Xaml.Media;
-using Microsoft.UI.Xaml.Navigation;
using System;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
@@ -89,14 +85,39 @@ private async void MainPage_KeyDown(object sender, KeyRoutedEventArgs args)
{
await TryOpenLastFile();
}
+
+ if (args.Key == VirtualKey.Enter && (Microsoft.UI.Input.InputKeyboardSource.GetKeyStateForCurrentThread(VirtualKey.Shift) & CoreVirtualKeyStates.Down)
+ == CoreVirtualKeyStates.Down && StorageApplicationPermissions.FutureAccessList.Entries.Count == 1)
+ {
+ await TryOpenLastFile();
+ }
+
+ if (args.Handled)
+ {
+ return;
+ }
+
if (args.Key == VirtualKey.V && !args.Handled)
{
if (playbackItem != null && playbackItem.VideoTracks.Count > 1)
{
- playbackItem.VideoTracks.SelectedIndex =
+ bool reverse = (Microsoft.UI.Input.InputKeyboardSource.GetKeyStateForCurrentThread(VirtualKey.Control) & CoreVirtualKeyStates.Down) == CoreVirtualKeyStates.Down;
+ int index = reverse ?
+ (playbackItem.VideoTracks.SelectedIndex + playbackItem.VideoTracks.Count - 1) % playbackItem.VideoTracks.Count :
(playbackItem.VideoTracks.SelectedIndex + 1) % playbackItem.VideoTracks.Count;
+ playbackItem.VideoTracks.SelectedIndex = index;
}
}
+
+ if (args.Key == VirtualKey.Right && FFmpegMSS != null && mediaPlayer.PlaybackSession.CanSeek)
+ {
+ mediaPlayer.PlaybackSession.Position += TimeSpan.FromSeconds(5);
+ }
+
+ if (args.Key == VirtualKey.Left && FFmpegMSS != null && mediaPlayer.PlaybackSession.CanSeek)
+ {
+ mediaPlayer.PlaybackSession.Position -= TimeSpan.FromSeconds(5);
+ }
}
private void CodecChecker_CodecRequired(object sender, CodecRequiredEventArgs args)
diff --git a/Samples/MediaPlayerWinUI/MainWindow.xaml.cs b/Samples/MediaPlayerWinUI/MainWindow.xaml.cs
index be61c07edd..1f880856fa 100644
--- a/Samples/MediaPlayerWinUI/MainWindow.xaml.cs
+++ b/Samples/MediaPlayerWinUI/MainWindow.xaml.cs
@@ -1,40 +1,4 @@
using Microsoft.UI.Xaml;
-using Microsoft.UI.Xaml.Controls;
-using Microsoft.UI.Xaml.Controls.Primitives;
-using Microsoft.UI.Xaml.Data;
-using Microsoft.UI.Xaml.Input;
-using Microsoft.UI.Xaml.Media;
-using Microsoft.UI.Xaml.Navigation;
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Runtime.InteropServices.WindowsRuntime;
-using Windows.Foundation;
-using Windows.Foundation.Collections;
-
-using FFmpegInteropX;
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
-using Windows.ApplicationModel.DataTransfer;
-using Windows.ApplicationModel.Core;
-using Windows.Foundation.Collections;
-using Windows.Media.Core;
-using Windows.Media.Playback;
-using Windows.Storage;
-using Windows.Storage.AccessCache;
-using Windows.Storage.Pickers;
-using Windows.Storage.Streams;
-using Windows.System;
-using Windows.UI.Core;
-using Windows.UI.Popups;
-using Windows.UI.Xaml;
-using Microsoft.UI.Xaml.Controls;
-using Microsoft.UI.Xaml.Input;
-using System.IO;
// To learn more about WinUI, the WinUI project structure,
// and more about our project templates, see: http://aka.ms/winui-project-info.
diff --git a/Source/D3D11VideoSampleProvider.h b/Source/D3D11VideoSampleProvider.h
index 7edd984809..8194b68a01 100644
--- a/Source/D3D11VideoSampleProvider.h
+++ b/Source/D3D11VideoSampleProvider.h
@@ -49,11 +49,11 @@ class D3D11VideoSampleProvider : public UncompressedVideoSampleProvider, public
ReleaseTrackedSamples();
}
- virtual void Flush() override
- {
- UncompressedVideoSampleProvider::Flush();
- ReturnTrackedSamples();
- }
+ virtual void Flush(bool flushBuffers) override
+ {
+ UncompressedVideoSampleProvider::Flush(flushBuffers);
+ ReturnTrackedSamples();
+ }
virtual HRESULT CreateBufferFromFrame(IBuffer* pBuffer, IDirect3DSurface* surface, AVFrame* avFrame, int64_t& framePts, int64_t& frameDuration) override
diff --git a/Source/FFmpegInteropX.idl b/Source/FFmpegInteropX.idl
index f3bb3eddd5..d9c1666b93 100644
--- a/Source/FFmpegInteropX.idl
+++ b/Source/FFmpegInteropX.idl
@@ -453,6 +453,9 @@ namespace FFmpegInteropX
///The subtitle stream.
Windows.Foundation.IAsyncOperation > AddExternalSubtitleAsync(Windows.Storage.Streams.IRandomAccessStream stream);
+ ///Starts filling the read-ahead buffer, if enabled in the configuration.
+ void StartBuffering();
+
// Properties
///Gets the configuration that has been passed when creating the MSS instance.
@@ -566,20 +569,38 @@ namespace FFmpegInteropX
Double MaxSupportedPlaybackRate{ get; set; };
///The buffer size in bytes to use for Windows.Storage.Streams.IRandomAccessStream sources.
+ [deprecated("Deprecated due to irritating name. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.", deprecate, 1)]
UInt32 StreamBufferSize{ get; set; };
+ ///The maximum number of bytes to read in one chunk for Windows.Storage.Streams.IRandomAccessStream sources.
+ UInt32 FileStreamReadSize{ get; set; };
+
///Additional options to use when creating the ffmpeg AVFormatContext.
Windows.Foundation.Collections.PropertySet FFmpegOptions{ get; set; };
///The default BufferTime that gets assigned to the MediaStreamSource for Windows.Storage.Streams.IRandomAccessStream sources.
- ///A value of 0 is recommended for local files, to avoid framework bugs and unneccessary memory consumption.
+ ///Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.
+ [deprecated("Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.", deprecate, 1)]
Windows.Foundation.TimeSpan DefaultBufferTime{ get; set; };
///The default BufferTime that gets assigned to the MediaStreamSource for URI sources.
- ///Default is 5 seconds. You might want to use higher values, especially for DASH stream sources.
+ ///Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.
+ [deprecated("Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.", deprecate, 1)]
Windows.Foundation.TimeSpan DefaultBufferTimeUri{ get; set; };
+ ///Enables or disables the read-ahead buffer.
+ ///This value can be changed any time during playback.
+ Boolean ReadAheadBufferEnabled{ get; set; };
+
+ ///The maximum number of bytes to buffer ahead per stream.
+ ///This value can be changed any time during playback.
+ Int64 ReadAheadBufferSize{ get; set; };
+
+ ///The maximum duration to buffer ahead per stream.
+ ///This value can be changed any time during playback.
+ Windows.Foundation.TimeSpan ReadAheadBufferDuration{ get; set; };
+
///Automatically select subtitles when they have the 'forced' flag set.
Boolean AutoSelectForcedSubtitles{ get; set; };
@@ -653,12 +674,6 @@ namespace FFmpegInteropX
Boolean DownmixAudioStreamsToStereo{ get; set; };
};
- [default_interface]
- runtimeclass ReferenceCue : Windows.Media.Core.IMediaCue
- {
- ReferenceCue(Windows.Media.Core.IMediaCue other);
- }
-
[default_interface]
#ifdef CPPCX
[bindable]
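
Taken together, the IDL changes above deprecate the MediaStreamSource-level buffering knobs (StreamBufferSize, DefaultBufferTime, DefaultBufferTimeUri) in favor of the library's own read-ahead buffer (ReadAheadBufferEnabled, ReadAheadBufferSize, ReadAheadBufferDuration) plus an explicit StartBuffering() call. A rough C# usage sketch of the new surface; the limit values are arbitrary examples, and the assumption that MediaSourceConfig is default-constructible mirrors the existing samples:

    using System;
    using System.Threading.Tasks;
    using FFmpegInteropX;
    using Windows.Media.Playback;

    static class ReadAheadExample
    {
        public static async Task OpenWithReadAheadAsync(string uri, MediaPlayer mediaPlayer)
        {
            var config = new MediaSourceConfig
            {
                ReadAheadBufferEnabled = true,
                ReadAheadBufferSize = 50 * 1024 * 1024,            // stop buffering a stream at ~50 MB...
                ReadAheadBufferDuration = TimeSpan.FromSeconds(30) // ...or ~30 s, whichever is reached first
            };

            var ffmpegMss = await FFmpegMediaSource.CreateFromUriAsync(uri, config);

            // Optionally pre-fill the read-ahead buffer before playback starts.
            ffmpegMss.StartBuffering();

            mediaPlayer.Source = ffmpegMss.CreateMediaPlaybackItem();
        }
    }
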
diff --git a/Source/FFmpegInteropX.vcxproj b/Source/FFmpegInteropX.vcxproj
index f9b240ef70..0c932d5f0d 100644
--- a/Source/FFmpegInteropX.vcxproj
+++ b/Source/FFmpegInteropX.vcxproj
@@ -98,6 +98,7 @@
%(AdditionalOptions) /bigobj
28204;4635;4634
true
+ stdcpp17
Console
@@ -159,6 +160,7 @@
+
diff --git a/Source/FFmpegInteropX.vcxproj.filters b/Source/FFmpegInteropX.vcxproj.filters
index 24d40ebc54..016a3628a8 100644
--- a/Source/FFmpegInteropX.vcxproj.filters
+++ b/Source/FFmpegInteropX.vcxproj.filters
@@ -93,6 +93,7 @@
+
Helpers
diff --git a/Source/FFmpegMediaSource.cpp b/Source/FFmpegMediaSource.cpp
index 21e54f4e4b..6cdbc61957 100644
--- a/Source/FFmpegMediaSource.cpp
+++ b/Source/FFmpegMediaSource.cpp
@@ -131,7 +131,7 @@ namespace winrt::FFmpegInteropX::implementation
{
// Setup FFmpeg custom IO to access file as stream. This is necessary when accessing any file outside of app installation directory and appdata folder.
// Credit to Philipp Sch http://www.codeproject.com/Tips/489450/Creating-Custom-FFmpeg-IO-Context
- fileStreamBuffer = (unsigned char*)av_malloc(config->StreamBufferSize());
+ fileStreamBuffer = (unsigned char*)av_malloc(config->FileStreamReadSize());
if (fileStreamBuffer == nullptr)
{
hr = E_OUTOFMEMORY;
@@ -429,7 +429,7 @@ namespace winrt::FFmpegInteropX::implementation
if (SUCCEEDED(hr))
{
- m_pReader = std::shared_ptr<FFmpegReader>(new FFmpegReader(avFormatCtx, &sampleProviders));
+ m_pReader = std::shared_ptr<FFmpegReader>(new FFmpegReader(avFormatCtx, &sampleProviders, config.as<winrt::FFmpegInteropX::MediaSourceConfig>()));
if (m_pReader == nullptr)
{
hr = E_OUTOFMEMORY;
@@ -667,7 +667,7 @@ namespace winrt::FFmpegInteropX::implementation
mediaDuration = TimeSpan(LONGLONG(avFormatCtx->duration * 10000000 / double(AV_TIME_BASE)));
// Assign initial BufferTime to MediaStreamSource
- mss.BufferTime(fileStreamData ? config->DefaultBufferTime() : config->DefaultBufferTimeUri());
+ mss.BufferTime(TimeSpan{ 0 });
mss.MaxSupportedPlaybackRate(config->MaxSupportedPlaybackRate());
if (mediaDuration.count() > 0)
@@ -824,7 +824,7 @@ namespace winrt::FFmpegInteropX::implementation
unsigned threads = std::thread::hardware_concurrency();
if (threads > 0)
{
- avAudioCodecCtx->thread_count = config->MaxAudioThreads() == 0 ? threads : min(threads, config->MaxAudioThreads());
+ avAudioCodecCtx->thread_count = config->MaxAudioThreads() == 0 ? threads : min((int)threads, config->MaxAudioThreads());
avAudioCodecCtx->thread_type = FF_THREAD_FRAME | FF_THREAD_SLICE;
}
@@ -976,7 +976,7 @@ namespace winrt::FFmpegInteropX::implementation
if (!avVideoCodecCtx->hw_device_ctx)
{
unsigned threads = std::thread::hardware_concurrency();
- avVideoCodecCtx->thread_count = config->MaxVideoThreads() == 0 ? threads : min(threads, config->MaxVideoThreads());
+ avVideoCodecCtx->thread_count = config->MaxVideoThreads() == 0 ? threads : min((int)threads, config->MaxVideoThreads());
avVideoCodecCtx->thread_type = config->IsFrameGrabber ? FF_THREAD_SLICE : FF_THREAD_FRAME | FF_THREAD_SLICE;
}
@@ -1273,6 +1273,12 @@ namespace winrt::FFmpegInteropX::implementation
return AddExternalSubtitleAsync(stream, config->DefaultExternalSubtitleStreamName());
}
+ void FFmpegMediaSource::StartBuffering()
+ {
+ std::lock_guard lock(mutex);
+ m_pReader->Start();
+ }
+
FFmpegInteropX::MediaSourceConfig FFmpegMediaSource::Configuration()
{
return config.as();
@@ -1394,6 +1400,8 @@ namespace winrt::FFmpegInteropX::implementation
if (m_pReader != nullptr)
{
+ m_pReader->Stop();
+ m_pReader->Flush();
m_pReader.reset();;
}
@@ -1669,60 +1677,67 @@ namespace winrt::FFmpegInteropX::implementation
{
std::lock_guard lock(mutex);
MediaStreamSourceStartingRequest request = args.Request();
-
- if (isFirstSeek && avHardwareContext)
+
+ try
{
- HRESULT hr = DirectXInteropHelper::GetDeviceManagerFromStreamSource(sender, deviceManager);
- if (SUCCEEDED(hr))
- hr = D3D11VideoSampleProvider::InitializeHardwareDeviceContext(sender, avHardwareContext, device, deviceContext, deviceManager, &deviceHandle);
-
- if (SUCCEEDED(hr))
+ if (isFirstSeek && avHardwareContext)
{
- // assign device and context
- for (auto &stream : videoStreams)
- {
- // set device pointers to stream
- hr = stream->SetHardwareDevice(device, deviceContext, avHardwareContext);
+ HRESULT hr = DirectXInteropHelper::GetDeviceManagerFromStreamSource(sender, deviceManager);
+ if (SUCCEEDED(hr))
+ hr = D3D11VideoSampleProvider::InitializeHardwareDeviceContext(sender, avHardwareContext, device, deviceContext, deviceManager, &deviceHandle);
- if (!SUCCEEDED(hr))
+ if (SUCCEEDED(hr))
+ {
+ // assign device and context
+ for (auto& stream : videoStreams)
{
- break;
+ // set device pointers to stream
+ hr = stream->SetHardwareDevice(device, deviceContext, avHardwareContext);
+
+ if (!SUCCEEDED(hr))
+ {
+ break;
+ }
}
}
- }
- else
- {
- // unref all hw device contexts
- for (auto &stream : videoStreams)
+ else
{
- stream->FreeHardwareDevice();
+ // unref all hw device contexts
+ for (auto& stream : videoStreams)
+ {
+ stream->FreeHardwareDevice();
+ }
+ av_buffer_unref(&avHardwareContext);
+ device = nullptr;
+ deviceContext = nullptr;
}
- av_buffer_unref(&avHardwareContext);
- device = nullptr;
- deviceContext = nullptr;
}
- }
- // Perform seek operation when MediaStreamSource received seek event from MediaElement
- if (request.StartPosition() && request.StartPosition().Value().count() <= mediaDuration.count() && (!isFirstSeek || request.StartPosition().Value().count() > 0))
- {
- if (currentVideoStream && !currentVideoStream->IsEnabled())
+ // Perform seek operation when MediaStreamSource received seek event from MediaElement
+ if (request.StartPosition() && request.StartPosition().Value().count() <= mediaDuration.count() && (!isFirstSeek || request.StartPosition().Value().count() > 0))
{
- currentVideoStream->EnableStream();
- }
+ if (currentVideoStream && !currentVideoStream->IsEnabled())
+ {
+ currentVideoStream->EnableStream();
+ }
- if (currentAudioStream && !currentAudioStream->IsEnabled())
- {
- currentAudioStream->EnableStream();
- }
+ if (currentAudioStream && !currentAudioStream->IsEnabled())
+ {
+ currentAudioStream->EnableStream();
+ }
- TimeSpan actualPosition = request.StartPosition().Value();
- auto hr = Seek(request.StartPosition().Value(), actualPosition, true);
- if (SUCCEEDED(hr))
- {
- request.SetActualStartPosition(actualPosition);
+ TimeSpan actualPosition = request.StartPosition().Value();
+ auto hr = Seek(request.StartPosition().Value(), actualPosition, true);
+ if (SUCCEEDED(hr))
+ {
+ request.SetActualStartPosition(actualPosition);
+ }
}
}
+ catch (...)
+ {
+ DebugMessage(L"Exception in OnStarting()!");
+ }
isFirstSeek = false;
isFirstSeekAfterStreamSwitch = false;
@@ -1732,24 +1747,36 @@ namespace winrt::FFmpegInteropX::implementation
{
UNREFERENCED_PARAMETER(sender);
std::lock_guard lock(mutex);
- if (mss != nullptr)
+
+ try
{
- if (currentAudioStream && args.Request().StreamDescriptor() == currentAudioStream->StreamDescriptor())
- {
- auto sample = currentAudioStream->GetNextSample();
- args.Request().Sample(sample);
- }
- else if (currentVideoStream && args.Request().StreamDescriptor() == currentVideoStream->StreamDescriptor())
+ if (mss != nullptr)
{
- CheckVideoDeviceChanged();
- auto sample = currentVideoStream->GetNextSample();
- args.Request().Sample(sample);
- }
- else
- {
- args.Request().Sample(nullptr);
+ if (config->ReadAheadBufferEnabled())
+ {
+ m_pReader->Start();
+ }
+ if (currentAudioStream && args.Request().StreamDescriptor() == currentAudioStream->StreamDescriptor())
+ {
+ auto sample = currentAudioStream->GetNextSample();
+ args.Request().Sample(sample);
+ }
+ else if (currentVideoStream && args.Request().StreamDescriptor() == currentVideoStream->StreamDescriptor())
+ {
+ CheckVideoDeviceChanged();
+ auto sample = currentVideoStream->GetNextSample();
+ args.Request().Sample(sample);
+ }
+ else
+ {
+ args.Request().Sample(nullptr);
+ }
}
}
+ catch (...)
+ {
+ DebugMessage(L"Exception in OnSampleRequested()!");
+ }
}
@@ -1842,223 +1869,78 @@ namespace winrt::FFmpegInteropX::implementation
UNREFERENCED_PARAMETER(sender);
std::lock_guard lock(mutex);
- if (currentAudioStream && args.Request().OldStreamDescriptor() == currentAudioStream->StreamDescriptor())
- {
- if (!currentAudioEffects.empty())
- {
- currentAudioStream->DisableFilters();
- }
- currentAudioStream->DisableStream();
- currentAudioStream = nullptr;
- }
- if (currentVideoStream && args.Request().OldStreamDescriptor() == currentVideoStream->StreamDescriptor())
+ try
{
- currentVideoStream->DisableStream();
- currentVideoStream = nullptr;
- }
+ m_pReader->Stop();
+ m_pReader->Flush();
- for (auto &stream : audioStreams)
- {
- if (stream->StreamDescriptor() == args.Request().NewStreamDescriptor())
+ if (currentAudioStream && args.Request().OldStreamDescriptor() == currentAudioStream->StreamDescriptor())
{
- currentAudioStream = stream;
- currentAudioStream->EnableStream();
if (!currentAudioEffects.empty())
{
- currentAudioStream->SetFilters(currentAudioEffects);
+ currentAudioStream->DisableFilters();
}
+ currentAudioStream->DisableStream();
+ currentAudioStream = nullptr;
}
- }
- for (auto &stream : videoStreams)
- {
- if (stream->StreamDescriptor() == args.Request().NewStreamDescriptor())
+ if (currentVideoStream && args.Request().OldStreamDescriptor() == currentVideoStream->StreamDescriptor())
{
- currentVideoStream = stream;
- currentVideoStream->EnableStream();
+ currentVideoStream->DisableStream();
+ currentVideoStream = nullptr;
}
- }
-
- isFirstSeekAfterStreamSwitch = config->FastSeekSmartStreamSwitching();
- }
- HRESULT FFmpegMediaSource::Seek(TimeSpan const& position, TimeSpan& actualPosition, bool allowFastSeek)
- {
- auto hr = S_OK;
-
- // Select the first valid stream either from video or audio
- auto stream = currentVideoStream ? currentVideoStream : currentAudioStream;
-
- if (stream)
- {
- int64_t seekTarget = stream->ConvertPosition(position);
- auto diffActual = position - currentPosition;
- auto diffLast = position - lastPosition;
- bool isSeekBeforeStreamSwitch = PlaybackSession() && config->FastSeekSmartStreamSwitching() && diffActual.count() > 0 && diffActual.count() < 5000000 && diffLast.count() > 0 && diffLast.count() < 10000000;
-
- if (currentVideoStream && config->FastSeek() && allowFastSeek && PlaybackSession() && !isSeekBeforeStreamSwitch && !isFirstSeekAfterStreamSwitch)
+ for (auto& stream : audioStreams)
{
- // fast seek
- auto playbackPosition = PlaybackSession() ? lastPosition : currentVideoStream->LastSampleTimestamp;
- bool seekForward;
- TimeSpan referenceTime;
-
- // decide seek direction
- if (isLastSeekForward && position > lastSeekStart && position <= lastSeekActual)
- {
- seekForward = true;
- referenceTime = lastSeekStart + ((position - lastSeekStart) * 0.2);
- DebugMessage(L" - ### Forward seeking continue\n");
- }
- else if (!isLastSeekForward && position < lastSeekStart && position >= lastSeekActual)
- {
- seekForward = false;
- referenceTime = lastSeekStart + ((position - lastSeekStart) * 0.2);
- DebugMessage(L" - ### Backward seeking continue\n");
- }
- else if (position >= playbackPosition)
- {
- seekForward = true;
- referenceTime = playbackPosition + ((position - playbackPosition) * 0.2);
- DebugMessage(L" - ### Forward seeking\n");
- }
- else
- {
- seekForward = false;
- referenceTime = playbackPosition + ((position - playbackPosition) * 0.2);
- DebugMessage(L" - ### Backward seeking\n");
- }
-
- int64_t min = INT64_MIN;
- int64_t max = INT64_MAX;
- if (seekForward)
+ if (stream->StreamDescriptor() == args.Request().NewStreamDescriptor())
{
- min = stream->ConvertPosition(referenceTime);
- }
- else
- {
- max = stream->ConvertPosition(referenceTime);
- }
-
- if (avformat_seek_file(avFormatCtx, stream->StreamIndex(), min, seekTarget, max, 0) < 0)
- {
- hr = E_FAIL;
- DebugMessage(L" - ### Error while seeking\n");
- }
- else
- {
- // Flush all active streams
- FlushStreams();
-
- // get and apply keyframe position for fast seeking
- TimeSpan timestampVideo;
- TimeSpan timestampVideoDuration;
- hr = currentVideoStream->GetNextPacketTimestamp(timestampVideo, timestampVideoDuration);
-
- while (hr == S_OK && seekForward && timestampVideo < referenceTime)
+ currentAudioStream = stream;
+ currentAudioStream->EnableStream();
+ if (!currentAudioEffects.empty())
{
- // our min position was not respected. try again with higher min and target.
- min += stream->ConvertDuration(TimeSpan{ 50000000 });
- seekTarget += stream->ConvertDuration(TimeSpan{ 50000000 });
-
- if (avformat_seek_file(avFormatCtx, stream->StreamIndex(), min, seekTarget, max, 0) < 0)
- {
- hr = E_FAIL;
- DebugMessage(L" - ### Error while seeking\n");
- }
- else
- {
- // Flush all active streams
- FlushStreams();
-
- // get updated timestamp
- hr = currentVideoStream->GetNextPacketTimestamp(timestampVideo, timestampVideoDuration);
- }
- }
-
- if (hr == S_OK)
- {
- actualPosition = timestampVideo;
-
- // remember last seek direction
- isLastSeekForward = seekForward;
- lastSeekStart = position;
- lastSeekActual = actualPosition;
-
- if (currentAudioStream)
- {
- // if we have audio, we need to seek back a bit more to get 100% clean audio
- TimeSpan timestampAudio;
- TimeSpan timestampAudioDuration;
- hr = currentAudioStream->GetNextPacketTimestamp(timestampAudio, timestampAudioDuration);
- if (hr == S_OK)
- {
- // audio stream should start one sample before video
- auto audioTarget = timestampVideo - timestampAudioDuration;
- auto audioPreroll = timestampAudio - timestampVideo;
- if (audioPreroll.count() > 0 && config->FastSeekCleanAudio())
- {
- seekTarget = stream->ConvertPosition(audioTarget - audioPreroll);
- if (av_seek_frame(avFormatCtx, stream->StreamIndex(), seekTarget, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY) < 0)
- {
- hr = E_FAIL;
- DebugMessage(L" - ### Error while seeking\n");
- }
- else
- {
- FlushStreams();
-
- // Now drop all packets until desired keyframe position
- currentVideoStream->SkipPacketsUntilTimestamp(timestampVideo);
- currentAudioStream->SkipPacketsUntilTimestamp(audioTarget);
-
- auto sample = currentAudioStream->GetNextSample();
- if (sample)
- {
- actualPosition = sample.Timestamp() + sample.Duration();
- }
- }
- }
- else if (audioPreroll.count() <= 0)
- {
- // Negative audio preroll. Just drop all packets until target position.
- currentAudioStream->SkipPacketsUntilTimestamp(audioTarget);
-
- hr = currentAudioStream->GetNextPacketTimestamp(timestampAudio, timestampAudioDuration);
- if (hr == S_OK && (config->FastSeekCleanAudio() || (timestampAudio + timestampAudioDuration) <= timestampVideo))
- {
- // decode one audio sample to get clean output
- auto sample = currentAudioStream->GetNextSample();
- if (sample)
- {
- actualPosition = sample.Timestamp() + sample.Duration();
- }
- }
- }
- }
- }
+ currentAudioStream->SetFilters(currentAudioEffects);
}
}
}
- else
+ for (auto& stream : videoStreams)
{
- if (av_seek_frame(avFormatCtx, stream->StreamIndex(), seekTarget, AVSEEK_FLAG_BACKWARD) < 0)
+ if (stream->StreamDescriptor() == args.Request().NewStreamDescriptor())
{
- hr = E_FAIL;
- DebugMessage(L" - ### Error while seeking\n");
- }
- else
- {
- // Flush all active streams
- FlushStreams();
+ currentVideoStream = stream;
+ currentVideoStream->EnableStream();
}
}
+
+ isFirstSeekAfterStreamSwitch = config->FastSeekSmartStreamSwitching();
}
- else
+ catch (...)
{
- hr = E_FAIL;
+ DebugMessage(L"Exception in OnSwitchStreamsRequested()!");
}
+ }
- return hr;
+ HRESULT FFmpegMediaSource::Seek(const TimeSpan& position, TimeSpan& actualPosition, bool allowFastSeek)
+ {
+ DebugMessage(L"Seek\n");
+
+ auto diffCurrent = position - currentPosition;
+ auto diffLast = position - lastPosition;
+ bool isSeekBeforeStreamSwitch = allowFastSeek && config->FastSeekSmartStreamSwitching() && !isFirstSeekAfterStreamSwitch && diffCurrent.count() > 0 && diffCurrent.count() < 5000000 && diffLast.count() > 0 && diffLast.count() < 10000000;
+
+ bool fastSeek = allowFastSeek && config->FastSeek() && currentVideoStream && PlaybackSession() && !isFirstSeekAfterStreamSwitch;
+ if (isSeekBeforeStreamSwitch)
+ {
+ return S_OK;
+ }
+ else if (position == currentPosition && position == lastPosition && position == lastSeek && !isFirstSeekAfterStreamSwitch && position.count() > 0)
+ {
+ DebugMessage(L"Skipping double seek request.\n");
+ return S_OK;
+ }
+ else
+ {
+ lastSeek = position;
+ return m_pReader->Seek(position, actualPosition, lastPosition, fastSeek, currentVideoStream, currentAudioStream);
+ }
}
void FFmpegMediaSource::OnPositionChanged(MediaPlaybackSession const& sender, IInspectable const& args)
diff --git a/Source/FFmpegMediaSource.h b/Source/FFmpegMediaSource.h
index 75ee10c5f6..b94eaa8875 100644
--- a/Source/FFmpegMediaSource.h
+++ b/Source/FFmpegMediaSource.h
@@ -93,6 +93,10 @@ namespace winrt::FFmpegInteropX::implementation
///The subtitle stream.
IAsyncOperation> AddExternalSubtitleAsync(IRandomAccessStream stream);
+ ///Starts filling the read-ahead buffer, if enabled in the configuration.
+ ///Let the stream buffer fill before starting playback.
+ void StartBuffering();
+
///Gets the configuration that has been passed when creating the MSS instance.
FFmpegInteropX::MediaSourceConfig Configuration();
@@ -168,17 +172,6 @@ namespace winrt::FFmpegInteropX::implementation
void InitializePlaybackItem(MediaPlaybackItem const& playbackitem);
bool CheckUseHardwareAcceleration(AVCodecContext* avCodecCtx, HardwareAccelerationStatus const& status, HardwareDecoderStatus& hardwareDecoderStatus, int maxProfile, int maxLevel);
- void FlushStreams()
- {
- // Flush all active streams
- for (auto &stream : sampleProviders)
- {
- if (stream && stream->IsEnabled())
- {
- stream->Flush();
- }
- }
- }
public://internal:
static winrt::com_ptr CreateFromStream(IRandomAccessStream const& stream, winrt::com_ptr const& config, DispatcherQueue const& dispatcher);
@@ -192,9 +185,9 @@ namespace winrt::FFmpegInteropX::implementation
}
std::shared_ptr m_pReader;
- AVDictionary* avDict = NULL;
- AVIOContext* avIOCtx = NULL;
- AVFormatContext* avFormatCtx = NULL;
+ AVDictionary* avDict = nullptr;
+ AVIOContext* avIOCtx = nullptr;
+ AVFormatContext* avFormatCtx = nullptr;
winrt::com_ptr fileStreamData = { nullptr };
ByteOrderMark streamByteOrderMark;
winrt::com_ptr config = { nullptr };
@@ -202,7 +195,6 @@ namespace winrt::FFmpegInteropX::implementation
private:
-
MediaStreamSource mss = { nullptr };
winrt::event_token startingRequestedToken{};
winrt::event_token sampleRequestedToken{};
@@ -214,7 +206,7 @@ namespace winrt::FFmpegInteropX::implementation
std::vector> sampleProviders;
std::vector> audioStreams;
- std::vector< std::shared_ptr<SubtitleProvider>> subtitleStreams;
+ std::vector<std::shared_ptr<SubtitleProvider>> subtitleStreams;
std::vector> videoStreams;
std::shared_ptr currentVideoStream;
@@ -241,25 +233,22 @@ namespace winrt::FFmpegInteropX::implementation
MediaPlaybackSession session = { nullptr };
winrt::event_token sessionPositionEvent{};
- hstring videoCodecName{};
- hstring audioCodecName{};
TimeSpan mediaDuration{};
TimeSpan subtitleDelay{};
- bool isFirstSeek;
- AVBufferRef* avHardwareContext = NULL;
- AVBufferRef* avHardwareContextDefault = NULL;
+
+ AVBufferRef* avHardwareContext = nullptr;
+ AVBufferRef* avHardwareContextDefault = nullptr;
com_ptr device;
com_ptr deviceContext;
- HANDLE deviceHandle = NULL;
+ HANDLE deviceHandle = nullptr;
com_ptr deviceManager;
+ bool isFirstSeek;
bool isFirstSeekAfterStreamSwitch = false;
- bool isLastSeekForward = false;
- TimeSpan lastSeekStart{ 0 };
- TimeSpan lastSeekActual{ 0 };
TimeSpan currentPosition{ 0 };
TimeSpan lastPosition{ 0 };
+ TimeSpan lastSeek{ 0 };
static DispatcherQueue GetCurrentDispatcher();
void OnPositionChanged(MediaPlaybackSession const& sender, IInspectable const& args);
diff --git a/Source/FFmpegReader.cpp b/Source/FFmpegReader.cpp
index 9af8f94667..de72f40987 100644
--- a/Source/FFmpegReader.cpp
+++ b/Source/FFmpegReader.cpp
@@ -18,65 +18,632 @@
#include "pch.h"
#include "FFmpegReader.h"
-extern "C"
-{
-#include "libavformat/avformat.h"
-}
+#include "StreamBuffer.h"
+#include "UncompressedSampleProvider.h"
+using namespace Concurrency;
-FFmpegReader::FFmpegReader(AVFormatContext* avFormatCtx, std::vector<std::shared_ptr<MediaSampleProvider>>* initProviders)
- : m_pAvFormatCtx(avFormatCtx)
+FFmpegReader::FFmpegReader(AVFormatContext* avFormatCtx, std::vector<std::shared_ptr<MediaSampleProvider>>* initProviders, MediaSourceConfig config)
+ : avFormatCtx(avFormatCtx)
, sampleProviders(initProviders)
+ , config(config)
{
}
FFmpegReader::~FFmpegReader()
{
- DebugMessage(L"FFMpeg reader destroyed\n");
+}
+
+void FFmpegReader::Start()
+{
+ std::lock_guard lock(mutex);
+ if (!isActive && !isEof && config.ReadAheadBufferEnabled() && (config.ReadAheadBufferSize() > 0 || config.ReadAheadBufferDuration().count() > 0) && !config.as<winrt::FFmpegInteropX::implementation::MediaSourceConfig>()->IsFrameGrabber)
+ {
+ sleepTimerTarget = new call<int>([this](int value) { OnTimer(value); });
+ if (!sleepTimerTarget)
+ {
+ return;
+ }
+
+ sleepTimer = new timer<int>(100u, 0, sleepTimerTarget, true);
+ if (!sleepTimer)
+ {
+ delete sleepTimerTarget;
+ return;
+ }
+
+ readTask = create_task([this]() { this->ReadDataLoop(); });
+ isActive = true;
+ }
+}
+
+void FFmpegReader::Stop()
+{
+ bool wait = false;
+ {
+ std::lock_guard lock(mutex);
+ if (isActive)
+ {
+ isActive = false;
+ wait = true;
+
+ sleepTimer->stop();
+ }
+ }
+
+ if (wait)
+ {
+ try
+ {
+ readTask.wait();
+ }
+ catch (...)
+ {
+ DebugMessage(L"Failed to wait for task. Probably destructor called from UI thread.\n");
+
+ while (!readTask.is_done())
+ {
+ Sleep(1);
+ }
+ }
+ delete sleepTimer;
+ delete sleepTimerTarget;
+ }
+
+ if (forceReadStream != -1)
+ {
+ waitStreamEvent.set();
+ }
+ forceReadStream = -1;
+ lastStream = nullptr;
+ fullStream = nullptr;
+}
+
+void FFmpegReader::Flush()
+{
+ std::lock_guard lock(mutex);
+ FlushCodecsAndBuffers();
+}
+
+void FFmpegReader::FlushCodecs()
+{
+ DebugMessage(L"FlushCodecs\n");
+ for (auto& stream : *sampleProviders)
+ {
+ if (stream)
+ stream->Flush(false);
+ }
+}
+
+void FFmpegReader::FlushCodecsAndBuffers()
+{
+ DebugMessage(L"FlushCodecsAndBuffers\n");
+ for (auto& stream : *sampleProviders)
+ {
+ if (stream)
+ stream->Flush(true);
+ }
+ readResult = 0;
+ isEof = false;
+}
+
+HRESULT FFmpegReader::Seek(TimeSpan position, TimeSpan& actualPosition, TimeSpan currentPosition, bool fastSeek, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream)
+{
+ Stop();
+
+ std::lock_guard lock(mutex);
+
+ auto hr = S_OK;
+ if (readResult != 0)
+ {
+ fastSeek = false;
+ readResult = 0;
+ }
+
+ auto isForwardSeek = position > currentPosition;
+
+ if (isForwardSeek && TrySeekBuffered(position, actualPosition, fastSeek, isForwardSeek, videoStream, audioStream))
+ {
+ // all good
+ DebugMessage(L"BufferedSeek!\n");
+ }
+ else if (fastSeek)
+ {
+ hr = SeekFast(position, actualPosition, currentPosition, videoStream, audioStream);
+ }
+ else
+ {
+ DebugMessage(L"NormalSeek\n");
+ // Select the first valid stream either from video or audio
+ auto stream = videoStream ? videoStream : audioStream;
+ int64_t seekTarget = stream->ConvertPosition(position);
+ if (av_seek_frame(avFormatCtx, stream->StreamIndex(), seekTarget, AVSEEK_FLAG_BACKWARD) < 0)
+ {
+ hr = E_FAIL;
+ DebugMessage(L" - ### Error while seeking\n");
+ }
+ else
+ {
+ // Flush all active streams with buffers
+ FlushCodecsAndBuffers();
+ }
+ }
+
+ return hr;
+}
+
+HRESULT FFmpegReader::SeekFast(TimeSpan position, TimeSpan& actualPosition, TimeSpan currentPosition, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream)
+{
+ DebugMessage(L"SeekFast\n");
+
+ HRESULT hr = S_OK;
+ int64_t seekTarget = videoStream->ConvertPosition(position);
+ bool isUriSource = avFormatCtx->url;
+
+ // fast seek
+ bool seekForward;
+ TimeSpan referenceTime;
+
+ // decide seek direction
+ if (isLastSeekForward && position > lastSeekStart && position <= lastSeekActual)
+ {
+ seekForward = true;
+ referenceTime = lastSeekStart + ((position - lastSeekStart) * 0.2);
+ DebugMessage(L" - ### Forward seeking continue\n");
+ }
+ else if (!isLastSeekForward && position < lastSeekStart && position >= lastSeekActual)
+ {
+ seekForward = false;
+ referenceTime = lastSeekStart + ((position - lastSeekStart) * 0.2);
+ DebugMessage(L" - ### Backward seeking continue\n");
+ }
+ else if (position >= currentPosition)
+ {
+ seekForward = true;
+ referenceTime = currentPosition + ((position - currentPosition) * 0.2);
+ DebugMessage(L" - ### Forward seeking\n");
+ }
+ else
+ {
+ seekForward = false;
+ referenceTime = currentPosition + ((position - currentPosition) * 0.2);
+ DebugMessage(L" - ### Backward seeking\n");
+ }
+
+ int64_t min = INT64_MIN;
+ int64_t max = INT64_MAX;
+ if (seekForward)
+ {
+ min = videoStream->ConvertPosition(referenceTime);
+ }
+ else
+ {
+ max = videoStream->ConvertPosition(referenceTime);
+ }
+
+ if (avformat_seek_file(avFormatCtx, videoStream->StreamIndex(), min, seekTarget, max, 0) < 0)
+ {
+ hr = E_FAIL;
+ DebugMessage(L" - ### Error while seeking\n");
+ }
+ else
+ {
+ // Flush all active streams with buffers
+ FlushCodecsAndBuffers();
+
+ // get and apply keyframe position for fast seeking
+ TimeSpan timestampVideo;
+ TimeSpan timestampVideoDuration;
+ hr = videoStream->GetNextPacketTimestamp(timestampVideo, timestampVideoDuration);
+ bool hasVideoPts = hr == S_OK;
+
+ if (hr == S_FALSE)
+ {
+ // S_FALSE means that the video packets do not contain timestamps
+ if (std::dynamic_pointer_cast<UncompressedSampleProvider>(videoStream))
+ {
+ // If we do not use passthrough, we can decode (and drop) then next sample to get the correct time.
+ auto sample = videoStream->GetNextSample();
+ if (sample)
+ {
+ timestampVideo = sample.Timestamp();
+ timestampVideoDuration = sample.Duration();
+ timestampVideo += timestampVideoDuration;
+ hr = S_OK;
+ }
+ }
+ else
+ {
+ // Otherwise, try with audio stream instead
+ hr = audioStream->GetNextPacketTimestamp(timestampVideo, timestampVideoDuration);
+ }
+ }
+
+ int seekCount = 0;
+ while (hr == S_OK && seekForward && timestampVideo < referenceTime && !isUriSource && hasVideoPts && seekCount++ < 10)
+ {
+ // our min position was not respected. try again with higher min and target.
+ min += videoStream->ConvertDuration(TimeSpan{ 50000000 });
+ seekTarget += videoStream->ConvertDuration(TimeSpan{ 50000000 });
+
+ if (avformat_seek_file(avFormatCtx, videoStream->StreamIndex(), min, seekTarget, max, 0) < 0)
+ {
+ hr = E_FAIL;
+ DebugMessage(L" - ### Error while seeking\n");
+ }
+ else
+ {
+ // Flush all active streams with buffers
+ FlushCodecsAndBuffers();
+
+ // get updated timestamp
+ hr = videoStream->GetNextPacketTimestamp(timestampVideo, timestampVideoDuration);
+ }
+ }
+
+ if (hr == S_OK)
+ {
+ actualPosition = timestampVideo;
+
+ // remember last seek direction
+ isLastSeekForward = seekForward;
+ lastSeekStart = position;
+ lastSeekActual = actualPosition;
+
+ if (audioStream)
+ {
+ // if we have audio, we need to seek back a bit more to get 100% clean audio
+ TimeSpan timestampAudio;
+ TimeSpan timestampAudioDuration;
+ hr = audioStream->GetNextPacketTimestamp(timestampAudio, timestampAudioDuration);
+ if (hr == S_OK)
+ {
+ // audio stream should start one sample before video
+ auto audioTarget = timestampVideo - timestampAudioDuration;
+ auto audioPreroll = timestampAudio - timestampVideo;
+ if (audioPreroll.count() > 0 && config.FastSeekCleanAudio() && !isUriSource)
+ {
+ seekTarget = videoStream->ConvertPosition(audioTarget - audioPreroll);
+ if (av_seek_frame(avFormatCtx, videoStream->StreamIndex(), seekTarget, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY) < 0)
+ {
+ hr = E_FAIL;
+ DebugMessage(L" - ### Error while seeking\n");
+ }
+ else
+ {
+ // Flush all active streams with buffers
+ FlushCodecsAndBuffers();
+
+ // Now drop all packets until desired keyframe position
+ videoStream->SkipPacketsUntilTimestamp(timestampVideo);
+ audioStream->SkipPacketsUntilTimestamp(audioTarget);
+
+ auto sample = audioStream->GetNextSample();
+ if (sample)
+ {
+ actualPosition = sample.Timestamp() + sample.Duration();
+ }
+ }
+ }
+ else if (audioPreroll.count() <= 0)
+ {
+ // Negative audio preroll. Just drop all packets until target position.
+ audioStream->SkipPacketsUntilTimestamp(audioTarget);
+
+ hr = audioStream->GetNextPacketTimestamp(timestampAudio, timestampAudioDuration);
+ if (hr == S_OK && (config.FastSeekCleanAudio() || (timestampAudio + timestampAudioDuration) <= timestampVideo))
+ {
+ // decode one audio sample to get clean output
+ auto sample = audioStream->GetNextSample();
+ if (sample)
+ {
+ actualPosition = sample.Timestamp() + sample.Duration();
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return hr;
+}
+
+bool FFmpegReader::TrySeekBuffered(TimeSpan position, TimeSpan& actualPosition, bool fastSeek, bool isForwardSeek, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream)
+{
+ bool result = true;
+ int vIndex = -1;
+ int aIndex = -1;
+
+ TimeSpan targetPosition = position;
+
+ if (videoStream)
+ {
+ auto pts = videoStream->ConvertPosition(targetPosition);
+ LONGLONG resultPts = pts;
+ vIndex = videoStream->packetBuffer->TryFindPacketIndex(pts, resultPts, true, fastSeek, isForwardSeek);
+ result &= vIndex >= 0;
+
+ if (result && fastSeek)
+ {
+ targetPosition = videoStream->ConvertPosition(resultPts);
+ }
+ }
+
+ if (result && audioStream)
+ {
+ auto pts = audioStream->ConvertPosition(targetPosition);
+ LONGLONG resultPts = pts;
+ aIndex = audioStream->packetBuffer->TryFindPacketIndex(pts, resultPts, false, fastSeek, isForwardSeek);
+ result &= aIndex >= 0;
+ }
+
+ if (result && vIndex == 0)
+ {
+ // We are at correct position already. No flush required.
+ DebugMessage(L"BufferedSeek: No flush\n");
+ }
+ else if (result)
+ {
+ // Flush all active streams but keep buffers
+ FlushCodecs();
+
+ if (videoStream)
+ {
+ videoStream->packetBuffer->DropPackets(vIndex);
+ }
+
+ if (audioStream)
+ {
+ if (config.FastSeekCleanAudio() && aIndex > 0)
+ {
+ aIndex--;
+ audioStream->packetBuffer->DropPackets(aIndex);
+
+ // decode one audio sample to get clean output
+ auto sample = audioStream->GetNextSample();
+ if (sample)
+ {
+ // TODO check if this is a good idea
+ //actualPosition = sample.Timestamp() + sample.Duration();
+ }
+ }
+ else
+ {
+ audioStream->packetBuffer->DropPackets(aIndex);
+ }
+ }
+
+ actualPosition = targetPosition;
+ }
+
+ return result;
+}
+
+void FFmpegReader::ReadDataLoop()
+{
+ bool sleep = false;
+
+ while (true)
+ {
+ // Read next packet
+ ReadPacket();
+
+ // Lock and check result
+ std::lock_guard lock(mutex);
+ if (!isActive)
+ {
+ // Stopped externally. No need to clean up.
+ break;
+ }
+ else if (isEof || !config.ReadAheadBufferEnabled())
+ {
+ // Self stop. Cleanup.
+ isActive = false;
+ sleepTimer->stop();
+ delete sleepTimer;
+ delete sleepTimerTarget;
+ waitStreamEvent.set();
+ break;
+ }
+ else if (readResult < 0 && avFormatCtx->url)
+ {
+ // if this is a uri stream, give it a little time to recover
+ Sleep(10);
+ }
+ else
+ {
+ // Check if needs sleep
+ sleep = CheckNeedsSleep(sleep);
+ if (sleep)
+ {
+ isSleeping = true;
+ sleepTimer->start();
+ break;
+ }
+ }
+ }
+}
+
+void FFmpegReader::OnTimer(int value)
+{
+ UNREFERENCED_PARAMETER(value);
+ std::lock_guard lock(mutex);
+ if (isActive)
+ {
+ readTask = create_task([this]() { ReadDataLoop(); });
+ isSleeping = false;
+ }
+ sleepTimer->pause();
+}
+
+bool FFmpegReader::CheckNeedsSleep(bool wasSleeping)
+{
+ bool force = forceReadStream >= 0;
+ if (force)
+ {
+ return false;
+ }
+
+ bool sleep = wasSleeping;
+
+ // check if we need to start sleeping
+ if (!sleep && lastStream && lastStream->IsBufferFull())
+ {
+ sleep = true;
+ fullStream = lastStream;
+ }
+ // check if we can stop sleeping
+ else if (sleep && fullStream && !fullStream->IsBufferFull())
+ {
+ sleep = false;
+ fullStream = nullptr;
+ for (auto& stream : *sampleProviders)
+ {
+ if (stream && stream->IsBufferFull())
+ {
+ sleep = true;
+ fullStream = stream;
+ break;
+ }
+ }
+ }
+
+ return sleep;
}
// Read the next packet from the stream and push it into the appropriate
// sample provider
int FFmpegReader::ReadPacket()
{
- int ret;
AVPacket* avPacket = av_packet_alloc();
+
if (!avPacket)
{
+ DebugMessage(L"Out of memory!\n");
+ isEof = true;
return E_OUTOFMEMORY;
}
- ret = av_read_frame(m_pAvFormatCtx, avPacket);
- if (ret < 0)
+ auto ret = av_read_frame(avFormatCtx, avPacket);
+ std::lock_guard lock(mutex);
+ readResult = ret;
+
+ if (readResult < 0)
{
+ if (readResult == AVERROR_EOF || (avFormatCtx->pb && avFormatCtx->pb->eof_reached))
+ {
+ DebugMessage(L"End of stream reached. Stop reading packets.\n");
+ isEof = true;
+ }
+ else if (errorCount++ <= config.SkipErrors())
+ {
+ DebugMessage(L"Read packet error. Retrying...\n");
+ }
+ else
+ {
+ DebugMessage(L"Packet read error. Stop reading packets.\n");
+ isEof = true;
+ }
av_packet_free(&avPacket);
- return ret;
}
+ else
+ {
+ errorCount = 0;
+ if (avPacket->stream_index == forceReadStream)
+ {
+ forceReadStream = -1;
+ waitStreamEvent.set();
+ }
+
+ if (avPacket->stream_index >= (int)sampleProviders->size())
+ {
+ // new stream detected. if this is a subtitle stream, we could create it now.
+ av_packet_free(&avPacket);
+ }
+ else if (avPacket->stream_index < 0)
+ {
+ av_packet_free(&avPacket);
+ }
+ else
+ {
+ auto& provider = sampleProviders->at(avPacket->stream_index);
+ if (provider)
+ {
+ provider->QueuePacket(avPacket);
+ lastStream = provider;
+ }
+ else
+ {
+ DebugMessage(L"Ignoring unused stream\n");
+ av_packet_free(&avPacket);
+ }
+ }
+ }
+
+ return readResult;
+}
- if (avPacket->stream_index >= (int)sampleProviders->size())
+
+int FFmpegReader::ReadPacketForStream(StreamBuffer* buffer)
+{
+ if (!(buffer->IsEmpty()))
{
- // new stream detected. if this is a subtitle stream, we could create it now.
- av_packet_free(&avPacket);
- return ret;
+ return 0;
}
- if (avPacket->stream_index < 0)
+ bool manual;
{
- av_packet_free(&avPacket);
- return E_FAIL;
+ std::lock_guard lock(mutex);
+ manual = !isActive;
+ if (isEof)
+ {
+ return AVERROR_EOF;
+ }
}
- std::shared_ptr provider = sampleProviders->at(avPacket->stream_index);
- if (provider)
+ if (manual)
{
- provider->QueuePacket(avPacket);
+ // no read-ahead used
+ while (!isEof)
+ {
+ ReadPacket();
+
+ if (!(buffer->IsEmpty()))
+ {
+ break;
+ }
+ }
}
else
{
- DebugMessage(L"Ignoring unused stream\n");
- av_packet_free(&avPacket);
- }
+ // read-ahead active
+ while (true)
+ {
+ task<void> waitStreamTask;
+ {
+ std::lock_guard lock(mutex);
+ if (!(buffer->IsEmpty()))
+ {
+ forceReadStream = -1;
+ break;
+ }
+ else if (isEof || !isActive)
+ {
+ break;
+ }
+ else
+ {
+ forceReadStream = buffer->StreamIndex;
+ waitStreamEvent = task_completion_event<void>();
+ waitStreamTask = create_task(waitStreamEvent);
+ }
+ }
+
+ waitStreamTask.wait();
+ }
+ }
- return ret;
+ return readResult;
}
diff --git a/Source/FFmpegReader.h b/Source/FFmpegReader.h
index 93ab023e9e..0b12b35688 100644
--- a/Source/FFmpegReader.h
+++ b/Source/FFmpegReader.h
@@ -18,18 +18,57 @@
#pragma once
+#include <ppltasks.h>
+#include <agents.h>
+
+#include "MediaSourceConfig.h"
#include "MediaSampleProvider.h"
+using namespace Concurrency;
+
+class StreamBuffer;
class FFmpegReader
{
public:
+ FFmpegReader(AVFormatContext* avFormatCtx, std::vector<std::shared_ptr<MediaSampleProvider>>* sampleProviders, MediaSourceConfig config);
virtual ~FFmpegReader();
- int ReadPacket();
- FFmpegReader(AVFormatContext* avFormatCtx, std::vector<std::shared_ptr<MediaSampleProvider>>* sampleProviders);
+ int ReadPacket();
+ int ReadPacketForStream(StreamBuffer* buffer);
+ void Start();
+ void Stop();
+ void Flush();
+ HRESULT Seek(TimeSpan position, TimeSpan& actualPosition, TimeSpan currentPosition, bool allowFastSeek, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream);
private:
- AVFormatContext* m_pAvFormatCtx = NULL;
- std::vector<std::shared_ptr<MediaSampleProvider>>* sampleProviders = NULL;
+
+ bool TrySeekBuffered(TimeSpan position, TimeSpan& actualPosition, bool fastSeek, bool isForwardSeek, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream);
+ HRESULT SeekFast(TimeSpan position, TimeSpan& actualPosition, TimeSpan currentPosition, std::shared_ptr<MediaSampleProvider> videoStream, std::shared_ptr<MediaSampleProvider> audioStream);
+ void OnTimer(int value);
+ void ReadDataLoop();
+ void FlushCodecs();
+ void FlushCodecsAndBuffers();
+ bool CheckNeedsSleep(bool wasSleeping);
+
+ AVFormatContext* avFormatCtx;
+ std::vector<std::shared_ptr<MediaSampleProvider>>* sampleProviders{ nullptr };
+ MediaSourceConfig config;
+ std::shared_ptr<MediaSampleProvider> lastStream{ nullptr };
+ std::shared_ptr<MediaSampleProvider> fullStream{ nullptr };
+
+ std::recursive_mutex mutex;
+ bool isActive = false;
+ bool isSleeping = false;
+ bool isEof = false;
+ unsigned int errorCount = 0;
+ int forceReadStream = 0;
+ int readResult = 0;
+ task<void> readTask;
+ task_completion_event<void> waitStreamEvent;
+ call<int>* sleepTimerTarget = NULL;
+ timer<int>* sleepTimer = NULL;
+
+ bool isLastSeekForward = false;
+ TimeSpan lastSeekStart { 0 };
+ TimeSpan lastSeekActual { 0 };
};
diff --git a/Source/MediaSampleProvider.cpp b/Source/MediaSampleProvider.cpp
index 653e5076e0..41807e904f 100644
--- a/Source/MediaSampleProvider.cpp
+++ b/Source/MediaSampleProvider.cpp
@@ -20,6 +20,7 @@
#include "MediaSampleProvider.h"
#include "FFmpegMediaSource.h"
#include "FFmpegReader.h"
+#include "StreamBuffer.h"
#include "LanguageTagConverter.h"
#include "AvCodecContextHelpers.h"
#include "Mfapi.h"
@@ -44,6 +45,7 @@ MediaSampleProvider::MediaSampleProvider(
, m_config(config)
, m_streamIndex(streamIndex)
, hardwareDecoderStatus(hardwareDecoderStatus)
+ , packetBuffer(new StreamBuffer(streamIndex, config))
{
DebugMessage(L"MediaSampleProvider\n");
@@ -68,7 +70,7 @@ MediaSampleProvider::~MediaSampleProvider()
avcodec_free_context(&m_pAvCodecCtx);
- Flush();
+ Flush(true);
device = nullptr;
deviceContext = nullptr;
}
@@ -206,7 +208,7 @@ void MediaSampleProvider::InitializeStreamInfo()
MediaStreamSample MediaSampleProvider::GetNextSample()
{
- DebugMessage(L"GetNextSample\n");
+ //DebugMessage(L"GetNextSample\n");
HRESULT hr = S_OK;
@@ -259,35 +261,11 @@ MediaStreamSample MediaSampleProvider::GetNextSample()
HRESULT MediaSampleProvider::GetNextPacket(AVPacket** avPacket, LONGLONG& packetPts, LONGLONG& packetDuration)
{
HRESULT hr = S_OK;
- unsigned int errorCount = 0;
- // Continue reading until there is an appropriate packet in the stream
- while (m_packetQueue.empty())
+ if (packetBuffer->ReadUntilNotEmpty(m_pReader))
{
- auto result = m_pReader->ReadPacket();
- if (result < 0)
- {
- if (result == AVERROR_EOF || (m_pAvFormatCtx->pb && m_pAvFormatCtx->pb->eof_reached))
- {
- DebugMessage(L"GetNextPacket reaching EOF\n");
- break;
- }
- else if (errorCount++ >= m_config.SkipErrors())
- {
- DebugMessage(L"Aborting after to too many read errors.\n");
- break;
- }
- else
- {
- DebugMessage(L"Read error.\n");
- }
- }
- }
+ auto packet = packetBuffer->PopPacket();
- if (!m_packetQueue.empty())
- {
- // read next packet and set pts values
- auto packet = PopPacket();
*avPacket = packet;
packetDuration = packet->duration;
@@ -323,20 +301,10 @@ HRESULT MediaSampleProvider::GetNextPacketTimestamp(TimeSpan& timestamp, TimeSpa
{
HRESULT hr = S_FALSE;
- // Continue reading until there is an appropriate packet in the stream
- while (m_packetQueue.empty())
- {
- if (m_pReader->ReadPacket() < 0)
- {
- DebugMessage(L"GetNextPacketTimestamp reaching EOF\n");
- break;
- }
- }
-
- if (!m_packetQueue.empty())
+ if (packetBuffer->ReadUntilNotEmpty(m_pReader))
{
// peek next packet and set pts value
- auto packet = m_packetQueue.front();
+ auto packet = packetBuffer->PeekPacket();
auto pts = packet->pts != AV_NOPTS_VALUE ? packet->pts : packet->dts;
if (pts != AV_NOPTS_VALUE)
{
@@ -352,51 +320,10 @@ HRESULT MediaSampleProvider::GetNextPacketTimestamp(TimeSpan& timestamp, TimeSpa
HRESULT MediaSampleProvider::SkipPacketsUntilTimestamp(TimeSpan const& timestamp)
{
HRESULT hr = S_OK;
- bool foundPacket = false;
- while (hr == S_OK && !foundPacket)
+ if (!packetBuffer->SkipUntilTimestamp(m_pReader, ConvertPosition(timestamp)))
{
- // Continue reading until there is an appropriate packet in the stream
- while (m_packetQueue.empty())
- {
- if (m_pReader->ReadPacket() < 0)
- {
- DebugMessage(L"SkipPacketsUntilTimestamp reaching EOF\n");
- break;
- }
- }
-
- if (!m_packetQueue.empty())
- {
- // peek next packet and check pts value
- auto packet = m_packetQueue.front();
-
- auto pts = packet->pts != AV_NOPTS_VALUE ? packet->pts : packet->dts;
- if (pts != AV_NOPTS_VALUE && packet->duration != AV_NOPTS_VALUE)
- {
- auto packetEnd = ConvertPosition(pts + packet->duration);
- if (packet->duration > 0 ? packetEnd <= timestamp : packetEnd < timestamp)
- {
- m_packetQueue.pop();
- av_packet_free(&packet);
- }
- else
- {
- foundPacket = true;
- break;
- }
- }
- else
- {
- hr = S_FALSE;
- break;
- }
- }
- else
- {
- // no more packet found
- hr = S_FALSE;
- }
+ hr = S_FALSE;
}
return hr;
@@ -404,11 +331,11 @@ HRESULT MediaSampleProvider::SkipPacketsUntilTimestamp(TimeSpan const& timestamp
void MediaSampleProvider::QueuePacket(AVPacket* packet)
{
- DebugMessage(L" - QueuePacket\n");
+ //DebugMessage(L" - QueuePacket\n");
if (m_isEnabled)
{
- m_packetQueue.push(packet);
+ packetBuffer->QueuePacket(packet);
}
else
{
@@ -416,32 +343,21 @@ void MediaSampleProvider::QueuePacket(AVPacket* packet)
}
}
-AVPacket* MediaSampleProvider::PopPacket()
+bool MediaSampleProvider::IsBufferFull()
{
- DebugMessage(L" - PopPacket\n");
- AVPacket* result = NULL;
-
- if (!m_packetQueue.empty())
- {
- result = m_packetQueue.front();
- m_packetQueue.pop();
- }
-
- return result;
+ return IsEnabled() && packetBuffer->IsFull(this);
}
-void MediaSampleProvider::Flush()
+void MediaSampleProvider::Flush(bool flushBuffers)
{
- DebugMessage(L"Flush\n");
- while (!m_packetQueue.empty())
- {
- AVPacket* avPacket = PopPacket();
- av_packet_free(&avPacket);
- }
if (m_pAvCodecCtx)
{
avcodec_flush_buffers(m_pAvCodecCtx);
}
+ if (flushBuffers)
+ {
+ packetBuffer->Flush();
+ }
m_isDiscontinuous = true;
IsCleanSample = false;
}
@@ -456,7 +372,6 @@ void MediaSampleProvider::EnableStream()
void MediaSampleProvider::DisableStream()
{
DebugMessage(L"DisableStream\n");
- Flush();
m_isEnabled = false;
m_pAvStream->discard = AVDISCARD_ALL;
}
@@ -520,13 +435,13 @@ void MediaSampleProvider::SetCommonVideoEncodingProperties(VideoEncodingProperti
void MediaSampleProvider::Detach()
{
- Flush();
+ Flush(false);
m_pReader = nullptr;
avcodec_free_context(&m_pAvCodecCtx);
}
void free_buffer(void* lpVoid)
{
- auto buffer = (AVBufferRef*)lpVoid;
+ auto buffer = (AVBufferRef *)lpVoid;
av_buffer_unref(&buffer);
}
diff --git a/Source/MediaSampleProvider.h b/Source/MediaSampleProvider.h
index 10a3c9aab4..0ab5a3b647 100644
--- a/Source/MediaSampleProvider.h
+++ b/Source/MediaSampleProvider.h
@@ -28,13 +28,14 @@ using namespace std;
using namespace winrt::FFmpegInteropX;
class FFmpegReader;
+class StreamBuffer;
class MediaSampleProvider
{
public:
virtual ~MediaSampleProvider();
virtual winrt::Windows::Media::Core::MediaStreamSample GetNextSample();
- virtual void Flush();
+ virtual void Flush(bool flushBuffers);
winrt::Windows::Media::Core::IMediaStreamDescriptor StreamDescriptor()
{
@@ -102,15 +103,15 @@ class MediaSampleProvider
void InitializeNameLanguageCodec();
virtual void InitializeStreamInfo();
virtual void QueuePacket(AVPacket* packet);
- AVPacket* PopPacket();
HRESULT GetNextPacket(AVPacket** avPacket, LONGLONG& packetPts, LONGLONG& packetDuration);
+ bool IsBufferFull();
virtual HRESULT CreateNextSampleBuffer(winrt::Windows::Storage::Streams::IBuffer* pBuffer, int64_t& samplePts, int64_t& sampleDuration, winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface* surface) = 0;
HRESULT GetNextPacketTimestamp(TimeSpan& timestamp, TimeSpan& packetDuration);
HRESULT SkipPacketsUntilTimestamp(TimeSpan const& timestamp);
virtual winrt::Windows::Media::Core::IMediaStreamDescriptor CreateStreamDescriptor() = 0;
virtual HRESULT SetSampleProperties(winrt::Windows::Media::Core::MediaStreamSample const& sample) { UNREFERENCED_PARAMETER(sample); return S_OK; }; // can be overridded for setting extended properties
- void EnableStream();
- void DisableStream();
+ virtual void EnableStream();
+ virtual void DisableStream();
virtual void SetFilters(winrt::hstring filterDefinition) { };// override for setting effects in sample providers
virtual void DisableFilters() {};//override for disabling filters in sample providers;
virtual void SetCommonVideoEncodingProperties(winrt::Windows::Media::MediaProperties::VideoEncodingProperties const& videoEncodingProperties, bool isCompressedFormat);
@@ -184,11 +185,14 @@ class MediaSampleProvider
HardwareDecoderStatus hardwareDecoderStatus);
private:
- std::queue m_packetQueue;
INT64 m_nextPacketPts = 0;
winrt::Windows::Media::Core::IMediaStreamDescriptor m_streamDescriptor = nullptr;
HardwareDecoderStatus hardwareDecoderStatus;
+public:
+ std::shared_ptr<StreamBuffer> packetBuffer;
+
+
protected:
// The FFmpeg context. Because they are complex types
// we declare them as internal so they don't get exposed
diff --git a/Source/MediaSourceConfig.cpp b/Source/MediaSourceConfig.cpp
index 1f72ae5df5..1a79c3d9ed 100644
--- a/Source/MediaSourceConfig.cpp
+++ b/Source/MediaSourceConfig.cpp
@@ -2,517 +2,3 @@
#include "MediaSourceConfig.h"
#include "MediaSourceConfig.g.cpp"
#include "winrt/FFmpegInteropX.h"
-
-using namespace winrt::Windows::Foundation::Collections;
-using namespace winrt::Windows::Media::Core;
-
-
-// Note: Remove this static_assert after copying these generated source files to your project.
-// This assertion exists to avoid compiling these generated source files directly.
-//static_assert(false, "Do not compile generated C++/WinRT source files directly");
-
-namespace winrt::FFmpegInteropX::implementation
-{
- MediaSourceConfig::MediaSourceConfig()
- {
- m_PassthroughAudioMP3 = false;
- m_PassthroughAudioAAC = false;
-
- m_VideoDecoderMode = VideoDecoderMode::Automatic;
-
- m_SystemDecoderH264MaxProfile = FF_PROFILE_H264_HIGH;
- m_SystemDecoderH264MaxLevel = 41;
- m_SystemDecoderHEVCMaxProfile = FF_PROFILE_HEVC_MAIN_10;
- m_SystemDecoderHEVCMaxLevel = -1;
- m_VideoOutputAllowIyuv = false;
- m_VideoOutputAllow10bit = true;
- m_VideoOutputAllowBgra8 = false;
- m_VideoOutputAllowNv12 = true;
-
- m_SkipErrors = 50;
- m_MaxAudioThreads = 2;
-
- m_MaxSupportedPlaybackRate = 4.0;
- m_StreamBufferSize = 16384;
-
- m_FFmpegOptions = PropertySet();
-
- m_DefaultBufferTime = TimeSpan{ 0 };
- m_DefaultBufferTimeUri = TimeSpan{ 50000000 };
-
- m_AutoSelectForcedSubtitles = true;
- m_OverrideSubtitleStyles = false;
-
- m_SubtitleRegion = TimedTextRegion();
-
- TimedTextSize extent;
- extent.Unit = TimedTextUnit::Percentage;
- extent.Width = 100;
- extent.Height = 88;
- m_SubtitleRegion.Extent(extent);
- TimedTextPoint position;
- position.Unit = TimedTextUnit::Pixels;
- position.X = 0;
- position.Y = 0;
- m_SubtitleRegion.Position(position);
- m_SubtitleRegion.DisplayAlignment(TimedTextDisplayAlignment::After);
- m_SubtitleRegion.Background(winrt::Windows::UI::Colors::Transparent());
- m_SubtitleRegion.ScrollMode(TimedTextScrollMode::Rollup);
- m_SubtitleRegion.TextWrapping(TimedTextWrapping::Wrap);
- m_SubtitleRegion.WritingMode(TimedTextWritingMode::LeftRightTopBottom);
- m_SubtitleRegion.IsOverflowClipped(false);
- m_SubtitleRegion.ZIndex(0);
- TimedTextDouble LineHeight;
- LineHeight.Unit = TimedTextUnit::Percentage;
- LineHeight.Value = 100;
- m_SubtitleRegion.LineHeight(LineHeight);
- TimedTextPadding padding;
- padding.Unit = TimedTextUnit::Percentage;
- padding.Start = 0;
- padding.After = 0;
- padding.Before = 0;
- padding.End = 0;
- m_SubtitleRegion.Padding(padding);
- m_SubtitleRegion.Name(L"");
-
- m_SubtitleStyle = TimedTextStyle();
-
- m_SubtitleStyle.FontFamily(L"default");
- TimedTextDouble fontSize;
- fontSize.Unit = TimedTextUnit::Percentage;
- fontSize.Value = 100;
- m_SubtitleStyle.FontSize(fontSize);
- m_SubtitleStyle.LineAlignment(TimedTextLineAlignment::Center);
- m_SubtitleStyle.FontStyle(TimedTextFontStyle::Normal);
- m_SubtitleStyle.FontWeight(TimedTextWeight::Normal);
- m_SubtitleStyle.Foreground(winrt::Windows::UI::Colors::White());
- m_SubtitleStyle.Background(Windows::UI::Colors::Transparent());
- //OutlineRadius = new TimedTextDouble { Unit = TimedTextUnit.Percentage, Value = 10 },
- TimedTextDouble outlineThickness;
- outlineThickness.Unit = TimedTextUnit::Percentage;
- outlineThickness.Value = 4.5;
- m_SubtitleStyle.OutlineThickness(outlineThickness);
- m_SubtitleStyle.FlowDirection(TimedTextFlowDirection::LeftToRight);
- m_SubtitleStyle.OutlineColor(winrt::Windows::UI::Color{ 0x80, 0, 0, 0 });
-
- m_AutoCorrectAnsiSubtitles = true;
- AnsiSubtitleEncoding(CharacterEncoding::GetSystemDefault());
-
- m_FastSeek = false;
- m_FastSeekCleanAudio = true;
- m_FastSeekSmartStreamSwitching = true;
-
- m_DefaultAudioStreamName = L"Audio Stream";
- m_DefaultSubtitleStreamName = L"Subtitle";
- m_DefaultExternalSubtitleStreamName = L"External Subtitle";
-
- m_AttachmentCacheFolderName = L"FFmpegAttachmentCache";
- m_UseEmbeddedSubtitleFonts = true;
-
- m_MinimumSubtitleDuration = TimeSpan{ 0 };
- m_AdditionalSubtitleDuration = TimeSpan{ 0 };
- m_PreventModifiedSubtitleDurationOverlap = true;
- }
-
-
- bool MediaSourceConfig::PassthroughAudioMP3()
- {
- return m_PassthroughAudioMP3;
- }
-
- void MediaSourceConfig::PassthroughAudioMP3(bool value)
- {
- m_PassthroughAudioMP3 = value;
- }
-
- bool MediaSourceConfig::PassthroughAudioAAC()
- {
- return m_PassthroughAudioAAC;
- }
-
- void MediaSourceConfig::PassthroughAudioAAC(bool value)
- {
- m_PassthroughAudioAAC = value;
- }
-
- FFmpegInteropX::VideoDecoderMode MediaSourceConfig::VideoDecoderMode()
- {
- return m_VideoDecoderMode;
- }
-
- void MediaSourceConfig::VideoDecoderMode(FFmpegInteropX::VideoDecoderMode const& value)
- {
- m_VideoDecoderMode = value;
- }
- int32_t MediaSourceConfig::SystemDecoderH264MaxProfile()
- {
- return m_SystemDecoderH264MaxProfile;
- }
-
- void MediaSourceConfig::SystemDecoderH264MaxProfile(int32_t value)
- {
- m_SystemDecoderH264MaxProfile = value;
- }
-
- int32_t MediaSourceConfig::SystemDecoderH264MaxLevel()
- {
- return m_SystemDecoderH264MaxLevel;
- }
-
- void MediaSourceConfig::SystemDecoderH264MaxLevel(int32_t value)
- {
- m_SystemDecoderH264MaxLevel = value;
- }
-
- int32_t MediaSourceConfig::SystemDecoderHEVCMaxProfile()
- {
- return m_SystemDecoderHEVCMaxProfile;
- }
-
- void MediaSourceConfig::SystemDecoderHEVCMaxProfile(int32_t value)
- {
- m_SystemDecoderHEVCMaxProfile = value;
- }
-
- int32_t MediaSourceConfig::SystemDecoderHEVCMaxLevel()
- {
- return m_SystemDecoderHEVCMaxLevel;
- }
-
- void MediaSourceConfig::SystemDecoderHEVCMaxLevel(int32_t value)
- {
- m_SystemDecoderHEVCMaxLevel = value;
- }
-
- bool MediaSourceConfig::VideoOutputAllowIyuv()
- {
- return m_VideoOutputAllowIyuv;
- }
-
- void MediaSourceConfig::VideoOutputAllowIyuv(bool value)
- {
- m_VideoOutputAllowIyuv = value;
- }
-
- bool MediaSourceConfig::VideoOutputAllow10bit()
- {
- return m_VideoOutputAllow10bit;
- }
-
- void MediaSourceConfig::VideoOutputAllow10bit(bool value)
- {
- m_VideoOutputAllow10bit = value;
- }
-
- bool MediaSourceConfig::VideoOutputAllowBgra8()
- {
- return m_VideoOutputAllowBgra8;
- }
-
- void MediaSourceConfig::VideoOutputAllowBgra8(bool value)
- {
- m_VideoOutputAllowBgra8 = value;
- }
-
- bool MediaSourceConfig::VideoOutputAllowNv12()
- {
- return m_VideoOutputAllowNv12;
- }
-
- void MediaSourceConfig::VideoOutputAllowNv12(bool value)
- {
- m_VideoOutputAllowNv12 = value;
- }
-
- uint32_t MediaSourceConfig::SkipErrors()
- {
- return m_SkipErrors;
- }
-
- void MediaSourceConfig::SkipErrors(uint32_t value)
- {
- m_SkipErrors = value;
- }
-
- uint32_t MediaSourceConfig::MaxVideoThreads()
- {
- return m_MaxVideoThreads;
- }
-
- void MediaSourceConfig::MaxVideoThreads(uint32_t value)
- {
- m_MaxVideoThreads = value;
- }
-
- uint32_t MediaSourceConfig::MaxAudioThreads()
- {
- return m_MaxAudioThreads;
- }
-
- void MediaSourceConfig::MaxAudioThreads(uint32_t value)
- {
- m_MaxAudioThreads = value;
- }
-
- double MediaSourceConfig::MaxSupportedPlaybackRate()
- {
- return m_MaxSupportedPlaybackRate;
- }
-
- void MediaSourceConfig::MaxSupportedPlaybackRate(double value)
- {
- m_MaxSupportedPlaybackRate = value;
- }
-
- uint32_t MediaSourceConfig::StreamBufferSize()
- {
- return m_StreamBufferSize;
- }
-
- void MediaSourceConfig::StreamBufferSize(uint32_t value)
- {
- m_StreamBufferSize = value;
- }
-
- Windows::Foundation::Collections::PropertySet MediaSourceConfig::FFmpegOptions()
- {
- return m_FFmpegOptions;
- }
-
- void MediaSourceConfig::FFmpegOptions(Windows::Foundation::Collections::PropertySet const& value)
- {
- m_FFmpegOptions = value;
- }
-
- Windows::Foundation::TimeSpan MediaSourceConfig::DefaultBufferTime()
- {
- return m_DefaultBufferTime;
- }
-
- void MediaSourceConfig::DefaultBufferTime(Windows::Foundation::TimeSpan const& value)
- {
- m_DefaultBufferTime = value;
- }
-
- Windows::Foundation::TimeSpan MediaSourceConfig::DefaultBufferTimeUri()
- {
- return m_DefaultBufferTimeUri;
- }
-
- void MediaSourceConfig::DefaultBufferTimeUri(Windows::Foundation::TimeSpan const& value)
- {
- m_DefaultBufferTimeUri = value;
- }
-
- bool MediaSourceConfig::AutoSelectForcedSubtitles()
- {
- return m_AutoSelectForcedSubtitles;
- }
-
- void MediaSourceConfig::AutoSelectForcedSubtitles(bool value)
- {
- m_AutoSelectForcedSubtitles = value;
- }
-
- bool MediaSourceConfig::OverrideSubtitleStyles()
- {
- return m_OverrideSubtitleStyles;
- }
-
- void MediaSourceConfig::OverrideSubtitleStyles(bool value)
- {
- m_OverrideSubtitleStyles = value;
- }
-
- Windows::Media::Core::TimedTextRegion MediaSourceConfig::SubtitleRegion()
- {
- return m_SubtitleRegion;
- }
-
- void MediaSourceConfig::SubtitleRegion(Windows::Media::Core::TimedTextRegion const& value)
- {
- m_SubtitleRegion = value;
- }
-
- Windows::Media::Core::TimedTextStyle MediaSourceConfig::SubtitleStyle()
- {
- return m_SubtitleStyle;
- }
-
- void MediaSourceConfig::SubtitleStyle(Windows::Media::Core::TimedTextStyle const& value)
- {
- m_SubtitleStyle = value;
- }
-
- bool MediaSourceConfig::AutoCorrectAnsiSubtitles()
- {
- return m_AutoCorrectAnsiSubtitles;
- }
-
- void MediaSourceConfig::AutoCorrectAnsiSubtitles(bool value)
- {
- m_AutoCorrectAnsiSubtitles = value;
- }
-
- FFmpegInteropX::CharacterEncoding MediaSourceConfig::AnsiSubtitleEncoding()
- {
- return m_CharacterEncoding;
- }
-
- void MediaSourceConfig::AnsiSubtitleEncoding(FFmpegInteropX::CharacterEncoding const& value)
- {
- if (value == nullptr)
- throw_hresult(E_INVALIDARG);
- m_CharacterEncoding = value;
- }
-
- Windows::Foundation::TimeSpan MediaSourceConfig::DefaultSubtitleDelay()
- {
- return m_DefaultSubtitleDelay;
- }
-
- void MediaSourceConfig::DefaultSubtitleDelay(Windows::Foundation::TimeSpan const& value)
- {
- m_DefaultSubtitleDelay = value;
- }
-
- bool MediaSourceConfig::FastSeek()
- {
- return m_FastSeek;
- }
-
- void MediaSourceConfig::FastSeek(bool value)
- {
- m_FastSeek = value;
- }
-
- bool MediaSourceConfig::FastSeekCleanAudio()
- {
- return m_FastSeekCleanAudio;
- }
-
- void MediaSourceConfig::FastSeekCleanAudio(bool value)
- {
- m_FastSeekCleanAudio = value;
- }
-
- bool MediaSourceConfig::FastSeekSmartStreamSwitching()
- {
- return m_FastSeekSmartStreamSwitching;
- }
-
- void MediaSourceConfig::FastSeekSmartStreamSwitching(bool value)
- {
- m_FastSeekSmartStreamSwitching = value;
- }
-
- hstring MediaSourceConfig::DefaultAudioStreamName()
- {
- return m_DefaultAudioStreamName;
- }
-
- void MediaSourceConfig::DefaultAudioStreamName(hstring const& value)
- {
- m_DefaultAudioStreamName = value;
- }
-
- hstring MediaSourceConfig::DefaultSubtitleStreamName()
- {
- return m_DefaultSubtitleStreamName;
- }
-
- void MediaSourceConfig::DefaultSubtitleStreamName(hstring const& value)
- {
- m_DefaultSubtitleStreamName = value;
- }
-
- hstring MediaSourceConfig::DefaultExternalSubtitleStreamName()
- {
- return m_DefaultExternalSubtitleStreamName;
- }
-
- void MediaSourceConfig::DefaultExternalSubtitleStreamName(hstring const& value)
- {
- m_DefaultExternalSubtitleStreamName = value;
- }
-
- bool MediaSourceConfig::UseEmbeddedSubtitleFonts()
- {
- return m_UseEmbeddedSubtitleFonts;
- }
-
- void MediaSourceConfig::UseEmbeddedSubtitleFonts(bool value)
- {
- m_UseEmbeddedSubtitleFonts = value;
- }
-
- hstring MediaSourceConfig::AttachmentCacheFolderName()
- {
- return m_AttachmentCacheFolderName;
- }
-
- void MediaSourceConfig::AttachmentCacheFolderName(hstring const& value)
- {
- m_AttachmentCacheFolderName = value;
- }
-
- Windows::Foundation::TimeSpan MediaSourceConfig::MinimumSubtitleDuration()
- {
- return m_MinimumSubtitleDuration;
- }
-
- void MediaSourceConfig::MinimumSubtitleDuration(Windows::Foundation::TimeSpan const& value)
- {
- m_MinimumSubtitleDuration = value;
- }
-
- Windows::Foundation::TimeSpan MediaSourceConfig::AdditionalSubtitleDuration()
- {
- return m_AdditionalSubtitleDuration;
- }
-
- void MediaSourceConfig::AdditionalSubtitleDuration(Windows::Foundation::TimeSpan const& value)
- {
- m_AdditionalSubtitleDuration = value;
- }
-
- bool MediaSourceConfig::PreventModifiedSubtitleDurationOverlap()
- {
- return m_PreventModifiedSubtitleDurationOverlap;
- }
-
- void MediaSourceConfig::PreventModifiedSubtitleDurationOverlap(bool value)
- {
- m_PreventModifiedSubtitleDurationOverlap = value;
- }
-
- hstring MediaSourceConfig::FFmpegVideoFilters()
- {
- return m_FFmpegVideoFilters;
- }
-
- void MediaSourceConfig::FFmpegVideoFilters(hstring const& value)
- {
- m_FFmpegVideoFilters = value;
- }
-
- hstring MediaSourceConfig::FFmpegAudioFilters()
- {
- return m_FFmpegAudioFilters;
- }
-
- void MediaSourceConfig::FFmpegAudioFilters(hstring const& value)
- {
- m_FFmpegAudioFilters = value;
- }
-
- bool MediaSourceConfig::DownmixAudioStreamsToStereo()
- {
- return m_DownmixAudioStreamsToStereo;
- }
-
- void MediaSourceConfig::DownmixAudioStreamsToStereo(bool value)
- {
- m_DownmixAudioStreamsToStereo = value;
- }
-}
diff --git a/Source/MediaSourceConfig.h b/Source/MediaSourceConfig.h
index ed891c4e75..51297371a4 100644
--- a/Source/MediaSourceConfig.h
+++ b/Source/MediaSourceConfig.h
@@ -6,191 +6,182 @@ using namespace winrt::Windows::Foundation::Collections;
// This assertion exists to avoid compiling these generated source files directly.
//static_assert(false, "Do not compile generated C++/WinRT source files directly");
+#define PROPERTY( name, type, defaultValue ) \
+ public: \
+ type name() { return _##name; } \
+ void name(type value) { _##name = value; } \
+ private: \
+ type _##name = defaultValue;
+
+#define PROPERTY_CONST( name, type, defaultValue ) \
+ public: \
+ type name() { return _##name; } \
+ void name(type const& value) { _##name = value; } \
+ private: \
+ type _##name = defaultValue;
+
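+// Example: PROPERTY(FastSeek, bool, true) expands to a FastSeek() getter, a FastSeek(bool) setter and a private field _FastSeek initialized to true.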
namespace winrt::FFmpegInteropX::implementation
{
+ using namespace winrt::Windows::Foundation;
+ using namespace winrt::Windows::Foundation::Collections;
+ using namespace winrt::Windows::Media::Core;
+
struct MediaSourceConfig : MediaSourceConfigT
{
- public:
- MediaSourceConfig();
-
-
///Enable passthrough for MP3 audio to system decoder.
///This could allow hardware decoding on some platforms (e.g. Windows Phone).
- bool PassthroughAudioMP3();
- void PassthroughAudioMP3(bool value);
+ PROPERTY(PassthroughAudioMP3, bool, false);
///Enable passthrough for AAC audio to system decoder.
///This could allow hardware decoding on some platforms (e.g. Windows Phone).
- bool PassthroughAudioAAC();
- void PassthroughAudioAAC(bool value);
+ PROPERTY(PassthroughAudioAAC, bool, false);
///Sets the video decoder mode. Default is AutoDetection.
- FFmpegInteropX::VideoDecoderMode VideoDecoderMode();
- void VideoDecoderMode(FFmpegInteropX::VideoDecoderMode const& value);
+ PROPERTY(VideoDecoderMode, FFmpegInteropX::VideoDecoderMode, VideoDecoderMode::Automatic);
///Max profile allowed for H264 system decoder. Default: High Profile (100). See FF_PROFILE_H264_* values.
- int32_t SystemDecoderH264MaxProfile();
- void SystemDecoderH264MaxProfile(int32_t value);
+ PROPERTY(SystemDecoderH264MaxProfile, int32_t, FF_PROFILE_H264_HIGH);
///Max level allowed for H264 system decoder. Default: Level 4.1 (41). Use -1 to disable level check.
///Most H264 HW decoders only support Level 4.1, so this is the default.
- int32_t SystemDecoderH264MaxLevel();
- void SystemDecoderH264MaxLevel(int32_t value);
+ PROPERTY(SystemDecoderH264MaxLevel, int32_t, 41);
///Max profile allowed for HEVC system decoder. Default: High10 Profile (2). See FF_PROFILE_HEVC_* values.
- int32_t SystemDecoderHEVCMaxProfile();
- void SystemDecoderHEVCMaxProfile(int32_t value);
+ PROPERTY(SystemDecoderHEVCMaxProfile, int32_t, FF_PROFILE_HEVC_MAIN_10);
///Max level allowed for HEVC system decoder. Default: Disabled (-1).
///Encoded as: 30*Major + 3*Minor. So Level 6.0 = 30*6 = 180, 5.1 = 30*5 + 3*1 = 163, 4.1 = 123.
///Many HEVC HW decoders support even very high levels, so we disable the check by default.
- int32_t SystemDecoderHEVCMaxLevel();
- void SystemDecoderHEVCMaxLevel(int32_t value);
+ PROPERTY(SystemDecoderHEVCMaxLevel, int32_t, -1);
///Allow video output in IYuv format.
- bool VideoOutputAllowIyuv();
- void VideoOutputAllowIyuv(bool value);
+ PROPERTY(VideoOutputAllowIyuv, bool, false);
///Allow video output in 10bit formats.
- bool VideoOutputAllow10bit();
- void VideoOutputAllow10bit(bool value);
+ PROPERTY(VideoOutputAllow10bit, bool, true);
///Allow video output in BGRA format - required for video transparency.
- bool VideoOutputAllowBgra8();
- void VideoOutputAllowBgra8(bool value);
+ PROPERTY(VideoOutputAllowBgra8, bool, false);
///Allow video output in NV12 format.
- bool VideoOutputAllowNv12();
- void VideoOutputAllowNv12(bool value);
+ PROPERTY(VideoOutputAllowNv12, bool, true);
- ///The maximum number of broken frames to skipp in a stream before stopping decoding.
- uint32_t SkipErrors();
- void SkipErrors(uint32_t value);
+ ///The maximum number of broken frames or packets to skip in a stream before stopping decoding.
+ PROPERTY(SkipErrors, int32_t, 50);
- ///The maximum number of video decoding threads.
- uint32_t MaxVideoThreads();
- void MaxVideoThreads(uint32_t value);
+ ///The maximum number of video decoding threads. Setting to 0 means using the number of logical CPU cores.
+ PROPERTY(MaxVideoThreads, int32_t, 0);
- ///The maximum number of audio decoding threads.
- uint32_t MaxAudioThreads();
- void MaxAudioThreads(uint32_t value);
+ ///The maximum number of audio decoding threads. Setting to 0 means using the number of logical CPU cores.
+ PROPERTY(MaxAudioThreads, int32_t, 2);
///The maximum supported playback rate. This is set on the media stream source itself.
/// This does not modify what the transport control default UI shows as available playback speeds. Custom UI is necessary!
- double MaxSupportedPlaybackRate();
- void MaxSupportedPlaybackRate(double value);
+ PROPERTY(MaxSupportedPlaybackRate, double, 4.0);
///The buffer size in bytes to use for Windows.Storage.Streams.IRandomAccessStream sources.
- uint32_t StreamBufferSize();
- void StreamBufferSize(uint32_t value);
+ //[deprecated("Deprecated due to irritating name. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.", deprecate, 1)]
+ PROPERTY(StreamBufferSize, int32_t, 16384);
+
+ ///The maximum number of bytes to read in one chunk for Windows.Storage.Streams.IRandomAccessStream sources.
+ PROPERTY(FileStreamReadSize, int32_t, 16384);
///Additional options to use when creating the ffmpeg AVFormatContext.
- Windows::Foundation::Collections::PropertySet FFmpegOptions();
- void FFmpegOptions(Windows::Foundation::Collections::PropertySet const& value);
+ PROPERTY_CONST(FFmpegOptions, PropertySet, PropertySet());
///The default BufferTime that gets assigned to the MediaStreamSource for Windows.Storage.Streams.IRandomAccessStream sources.
- ///A value of 0 is recommended for local files, to avoid framework bugs and unneccessary memory consumption.
- winrt::Windows::Foundation::TimeSpan DefaultBufferTime();
- void DefaultBufferTime(winrt::Windows::Foundation::TimeSpan const& value);
+ ///Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.
+ PROPERTY_CONST(DefaultBufferTime, TimeSpan, TimeSpan{ 0 });
///The default BufferTime that gets assigned to the MediaStreamSource for URI sources.
- ///Default is 5 seconds. You might want to use higher values, especially for DASH stream sources.
- winrt::Windows::Foundation::TimeSpan DefaultBufferTimeUri();
- void DefaultBufferTimeUri(winrt::Windows::Foundation::TimeSpan const& value);
+ ///Deprecated due to framework bugs and memory consumption. Use ReadAheadBufferSize and ReadAheadBufferDuration instead.
+ PROPERTY_CONST(DefaultBufferTimeUri, TimeSpan, TimeSpan{ 0 });
+
+
+ ///Enables or disables the read-ahead buffer.
+ ///This value can be changed any time during playback.
+ PROPERTY(ReadAheadBufferEnabled, bool, false);
+
+ ///The maximum number of bytes to buffer ahead per stream.
+ ///This value can be changed any time during playback.
+ PROPERTY(ReadAheadBufferSize, int64_t, 100*1024*1024);
+
+ ///The maximum duration to buffer ahead per stream.
+ ///This value can be changed any time during playback.
+ PROPERTY_CONST(ReadAheadBufferDuration, TimeSpan, TimeSpan{ 600000000 });
///Automatically select subtitles when they have the 'forced' flag set.
- bool AutoSelectForcedSubtitles();
- void AutoSelectForcedSubtitles(bool value);
+ PROPERTY(AutoSelectForcedSubtitles, bool, true);
///Use SubtitleRegion and SubtitleStyle from config class, even if custom styles are defined for a subtitle.
- bool OverrideSubtitleStyles();
- void OverrideSubtitleStyles(bool value);
+ PROPERTY(OverrideSubtitleStyles, bool, false);
///Default region to use for subtitles.
- Windows::Media::Core::TimedTextRegion SubtitleRegion();
- void SubtitleRegion(Windows::Media::Core::TimedTextRegion const& value);
+ PROPERTY_CONST(SubtitleRegion, TimedTextRegion, CreateDefaultSubtitleRegion());
///Default style to use for subtitles.
- Windows::Media::Core::TimedTextStyle SubtitleStyle();
- void SubtitleStyle(Windows::Media::Core::TimedTextStyle const& value);
+ PROPERTY_CONST(SubtitleStyle, TimedTextStyle, CreateDefaultSubtitleStyle());
///Enable conversion of ANSI encoded subtitles to UTF-8.
- bool AutoCorrectAnsiSubtitles();
- void AutoCorrectAnsiSubtitles(bool value);
+ PROPERTY(AutoCorrectAnsiSubtitles, bool, true);
///The character encoding used to decode ANSI encoded subtitles. By default, the active windows codepage is used.
- FFmpegInteropX::CharacterEncoding AnsiSubtitleEncoding();
- void AnsiSubtitleEncoding(FFmpegInteropX::CharacterEncoding const& value);
+ PROPERTY_CONST(AnsiSubtitleEncoding, FFmpegInteropX::CharacterEncoding, CharacterEncoding::GetSystemDefault());
///The subtitle delay will be initially applied to all subtitle tracks.
///Use SetSubtitleDelay() on the FFmpegMediaSource instance if you want to change the delay during playback.
- winrt::Windows::Foundation::TimeSpan DefaultSubtitleDelay();
- void DefaultSubtitleDelay(winrt::Windows::Foundation::TimeSpan const& value);
+ PROPERTY_CONST(DefaultSubtitleDelay, TimeSpan, TimeSpan{ 0 });
/// FFmpegMediaSource will seek to the closest video keyframe, if set to true.
///
/// For FastSeek to work, you must use the MediaPlayer for playback, and assign
- /// MediaPlayer.PlaybackSession to the FFmpegMediaSource.PlaybackSession .
+ /// MediaPlayer.PlaybackSession to the FFmpegMediaSource.PlaybackSession property.
///
- bool FastSeek();
- void FastSeek(bool value);
+ PROPERTY(FastSeek, bool, true);
///Ensure that audio plays without artifacts after fast seeking.
///This will slightly reduce the speed of fast seeking. Enabled by default.
- bool FastSeekCleanAudio();
- void FastSeekCleanAudio(bool value);
+ PROPERTY(FastSeekCleanAudio, bool, true);
///Try to improve stream switching times when FastSeek is enabled.
- bool FastSeekSmartStreamSwitching();
- void FastSeekSmartStreamSwitching(bool value);
+ PROPERTY(FastSeekSmartStreamSwitching, bool, true);
///The default name to use for audio streams.
- hstring DefaultAudioStreamName();
- void DefaultAudioStreamName(hstring const& value);
+ PROPERTY_CONST(DefaultAudioStreamName, hstring, L"Audio Stream");
///The default name to use for subtitle streams.
- hstring DefaultSubtitleStreamName();
- void DefaultSubtitleStreamName(hstring const& value);
+ PROPERTY_CONST(DefaultSubtitleStreamName, hstring, L"Subtitle");
///The default name to use for external subtitle streams.
- hstring DefaultExternalSubtitleStreamName();
- void DefaultExternalSubtitleStreamName(hstring const& value);
+ PROPERTY_CONST(DefaultExternalSubtitleStreamName, hstring, L"External Subtitle");
///Use subtitle font files that are embedded in the media file.
- bool UseEmbeddedSubtitleFonts();
- void UseEmbeddedSubtitleFonts(bool value);
+ PROPERTY(UseEmbeddedSubtitleFonts, bool, true);
///The folder where attachments such as fonts are stored (inside the app's temp folder).
- hstring AttachmentCacheFolderName();
- void AttachmentCacheFolderName(hstring const& value);
+ PROPERTY_CONST(AttachmentCacheFolderName, hstring, L"FFmpegAttachmentCache");
///The minimum amount of time a subtitle should be shown. Default is 0.
- winrt::Windows::Foundation::TimeSpan MinimumSubtitleDuration();
- void MinimumSubtitleDuration(winrt::Windows::Foundation::TimeSpan const& value);
+ PROPERTY_CONST(MinimumSubtitleDuration, TimeSpan, TimeSpan{ 0 });
///Each subtitle's duration is extended by this amount. Default is 0.
- winrt::Windows::Foundation::TimeSpan AdditionalSubtitleDuration();
- void AdditionalSubtitleDuration(winrt::Windows::Foundation::TimeSpan const& value);
+ PROPERTY_CONST(AdditionalSubtitleDuration, TimeSpan, TimeSpan{ 0 });
///Try to prevent overlapping subtitles when extending durations.
- bool PreventModifiedSubtitleDurationOverlap();
- void PreventModifiedSubtitleDurationOverlap(bool value);
+ PROPERTY(PreventModifiedSubtitleDurationOverlap, bool, true);
///Initial FFmpeg video filters. Might be changed later through FFmpegMediaSource.SetFFmpegVideoFilters().
///Using FFmpeg video filters will degrade playback performance, since they run on the CPU and not on the GPU.
- hstring FFmpegVideoFilters();
- void FFmpegVideoFilters(hstring const& value);
+ PROPERTY_CONST(FFmpegVideoFilters, hstring, {});
///Initial FFmpeg audio filters. Might be changed later through FFmpegMediaSource.SetFFmpegAudioFilters().
- hstring FFmpegAudioFilters();
- void FFmpegAudioFilters(hstring const& value);
+ PROPERTY_CONST(FFmpegAudioFilters, hstring, {});
///Downmix multi-channel audio streams to stereo format.
- bool DownmixAudioStreamsToStereo();
- void DownmixAudioStreamsToStereo(bool value);
+ PROPERTY(DownmixAudioStreamsToStereo, bool, false);
public:
//internal:
@@ -199,50 +190,66 @@ namespace winrt::FFmpegInteropX::implementation
bool IsExternalSubtitleParser;
/*Used to pass additional, specific options to external sub parsers*/
- PropertySet AdditionalFFmpegSubtitleOptions = {};
+ PropertySet AdditionalFFmpegSubtitleOptions = {nullptr};
private:
- bool m_PassthroughAudioMP3 = false;
- bool m_PassthroughAudioAAC = false;
- winrt::FFmpegInteropX::VideoDecoderMode m_VideoDecoderMode;
- int m_SystemDecoderH264MaxProfile = 0;
- int m_SystemDecoderH264MaxLevel = 0;
- int m_SystemDecoderHEVCMaxProfile = 0;
- int m_SystemDecoderHEVCMaxLevel = 0;
- bool m_VideoOutputAllowIyuv = false;
- bool m_VideoOutputAllow10bit = false;
- bool m_VideoOutputAllowBgra8 = false;
- bool m_VideoOutputAllowNv12 = false;
- unsigned int m_SkipErrors = 0;
- unsigned int m_MaxVideoThreads = 0;
- unsigned int m_MaxAudioThreads = 0;
- double m_MaxSupportedPlaybackRate = 0.0;
- unsigned int m_StreamBufferSize = 0;
- winrt::Windows::Foundation::Collections::PropertySet m_FFmpegOptions = {};
- winrt::Windows::Foundation::TimeSpan m_DefaultBufferTime{};
- winrt::Windows::Foundation::TimeSpan m_DefaultBufferTimeUri{};
- bool m_AutoSelectForcedSubtitles = false;
- bool m_OverrideSubtitleStyles = false;
- winrt::Windows::Media::Core::TimedTextRegion m_SubtitleRegion = {};
- winrt::Windows::Media::Core::TimedTextStyle m_SubtitleStyle = {};
- bool m_AutoCorrectAnsiSubtitles = false;
- winrt::Windows::Foundation::TimeSpan m_DefaultSubtitleDelay{};
- bool m_FastSeek = false;
- bool m_FastSeekCleanAudio = false;
- bool m_FastSeekSmartStreamSwitching = false;
- hstring m_DefaultAudioStreamName{};
- hstring m_DefaultSubtitleStreamName{};
- hstring m_DefaultExternalSubtitleStreamName{};
- bool m_UseEmbeddedSubtitleFonts = false;
- hstring m_AttachmentCacheFolderName{};
- winrt::Windows::Foundation::TimeSpan m_MinimumSubtitleDuration{};
- winrt::Windows::Foundation::TimeSpan m_AdditionalSubtitleDuration{};
- bool m_PreventModifiedSubtitleDurationOverlap = false;
- hstring m_FFmpegVideoFilters{};
- hstring m_FFmpegAudioFilters{};
- bool m_DownmixAudioStreamsToStereo = false;
- CharacterEncoding m_CharacterEncoding{ nullptr };
+ TimedTextRegion CreateDefaultSubtitleRegion()
+ {
+ auto region = TimedTextRegion();
+ TimedTextSize extent;
+ extent.Unit = TimedTextUnit::Percentage;
+ extent.Width = 100;
+ extent.Height = 88;
+ region.Extent(extent);
+ TimedTextPoint position;
+ position.Unit = TimedTextUnit::Pixels;
+ position.X = 0;
+ position.Y = 0;
+ region.Position(position);
+ region.DisplayAlignment(TimedTextDisplayAlignment::After);
+ region.Background(winrt::Windows::UI::Colors::Transparent());
+ region.ScrollMode(TimedTextScrollMode::Rollup);
+ region.TextWrapping(TimedTextWrapping::Wrap);
+ region.WritingMode(TimedTextWritingMode::LeftRightTopBottom);
+ region.IsOverflowClipped(false);
+ region.ZIndex(0);
+ TimedTextDouble LineHeight;
+ LineHeight.Unit = TimedTextUnit::Percentage;
+ LineHeight.Value = 100;
+ region.LineHeight(LineHeight);
+ TimedTextPadding padding;
+ padding.Unit = TimedTextUnit::Percentage;
+ padding.Start = 0;
+ padding.After = 0;
+ padding.Before = 0;
+ padding.End = 0;
+ region.Padding(padding);
+ region.Name(L"");
+ return region;
+ }
+
+ TimedTextStyle CreateDefaultSubtitleStyle()
+ {
+ auto style = TimedTextStyle();
+ style.FontFamily(L"default");
+ TimedTextDouble fontSize;
+ fontSize.Unit = TimedTextUnit::Percentage;
+ fontSize.Value = 100;
+ style.FontSize(fontSize);
+ style.LineAlignment(TimedTextLineAlignment::Center);
+ style.FontStyle(TimedTextFontStyle::Normal);
+ style.FontWeight(TimedTextWeight::Normal);
+ style.Foreground(winrt::Windows::UI::Colors::White());
+ style.Background(Windows::UI::Colors::Transparent());
+ TimedTextDouble outlineThickness;
+ outlineThickness.Unit = TimedTextUnit::Percentage;
+ outlineThickness.Value = 4.5;
+ style.OutlineThickness(outlineThickness);
+ style.FlowDirection(TimedTextFlowDirection::LeftToRight);
+ style.OutlineColor(winrt::Windows::UI::Color{ 0x80, 0, 0, 0 });
+ return style;
+ }
};
}
namespace winrt::FFmpegInteropX::factory_implementation
diff --git a/Source/NALPacketSampleProvider.cpp b/Source/NALPacketSampleProvider.cpp
index f8510efd96..ebe28c336b 100644
--- a/Source/NALPacketSampleProvider.cpp
+++ b/Source/NALPacketSampleProvider.cpp
@@ -37,9 +37,9 @@ NALPacketSampleProvider::~NALPacketSampleProvider()
{
}
-void NALPacketSampleProvider::Flush()
+void NALPacketSampleProvider::Flush(bool flushBuffers)
{
- CompressedSampleProvider::Flush();
+ CompressedSampleProvider::Flush(flushBuffers);
m_bHasSentExtradata = false;
}
diff --git a/Source/NALPacketSampleProvider.h b/Source/NALPacketSampleProvider.h
index e0e4b4ea0d..231fd20e9f 100644
--- a/Source/NALPacketSampleProvider.h
+++ b/Source/NALPacketSampleProvider.h
@@ -25,7 +25,7 @@ class NALPacketSampleProvider :
{
public:
virtual ~NALPacketSampleProvider();
- virtual void Flush() override;
+ virtual void Flush(bool flushBuffers) override;
NALPacketSampleProvider(
std::shared_ptr reader,
diff --git a/Source/ReferenceCue.cpp b/Source/ReferenceCue.cpp
index 921db65ea1..d092444745 100644
--- a/Source/ReferenceCue.cpp
+++ b/Source/ReferenceCue.cpp
@@ -1,43 +1,40 @@
#include "pch.h"
#include "ReferenceCue.h"
-#include "ReferenceCue.g.cpp"
-// Note: Remove this static_assert after copying these generated source files to your project.
-// This assertion exists to avoid compiling these generated source files directly.
-//static_assert(false, "Do not compile generated C++/WinRT source files directly");
+ReferenceCue::ReferenceCue(winrt::Windows::Media::Core::IMediaCue const& other)
+{
+ this->cueRef = other;
+ this->duration = other.Duration();
+ this->id = other.Id();
+ this->startTime = other.StartTime();
+}
-namespace winrt::FFmpegInteropX::implementation
-{
- ReferenceCue::ReferenceCue(Windows::Media::Core::IMediaCue const& other)
- {
- this->cueRef = other;
- this->duration = other.Duration();
- this->id = other.Id();
- this->startTime = other.StartTime();
- }
+void ReferenceCue::StartTime(winrt::Windows::Foundation::TimeSpan const& value)
+{
+ this->startTime = value;
+}
- void ReferenceCue::StartTime(Windows::Foundation::TimeSpan const& value)
- {
- this->startTime = value;
- }
- Windows::Foundation::TimeSpan ReferenceCue::StartTime()
- {
- return startTime;
- }
- void ReferenceCue::Duration(Windows::Foundation::TimeSpan const& value)
- {
- this->duration = value;
- }
- Windows::Foundation::TimeSpan ReferenceCue::Duration()
- {
- return duration;
- }
- void ReferenceCue::Id(hstring const& value)
- {
- id = value;
- }
- hstring ReferenceCue::Id()
- {
- return id;
- }
+winrt::Windows::Foundation::TimeSpan ReferenceCue::StartTime()
+{
+ return startTime;
+}
+void ReferenceCue::Duration(winrt::Windows::Foundation::TimeSpan const& value)
+{
+ this->duration = value;
+}
+winrt::Windows::Foundation::TimeSpan ReferenceCue::Duration()
+{
+ return duration;
+}
+void ReferenceCue::Id(winrt::hstring const& value)
+{
+ id = value;
+}
+winrt::hstring ReferenceCue::Id()
+{
+ return id;
+}
+winrt::Windows::Media::Core::IMediaCue ReferenceCue::CueRef()
+{
+ return cueRef;
}
diff --git a/Source/ReferenceCue.h b/Source/ReferenceCue.h
index 38731f4bec..d6c07b94ec 100644
--- a/Source/ReferenceCue.h
+++ b/Source/ReferenceCue.h
@@ -1,37 +1,26 @@
#pragma once
-#include "ReferenceCue.g.h"
// Note: Remove this static_assert after copying these generated source files to your project.
// This assertion exists to avoid compiling these generated source files directly.
//static_assert(false, "Do not compile generated C++/WinRT source files directly");
-namespace winrt::FFmpegInteropX::implementation
+struct ReferenceCue : winrt::implements<ReferenceCue, winrt::Windows::Media::Core::IMediaCue>
{
- struct ReferenceCue : ReferenceCueT
- {
- ReferenceCue() = default;
+ ReferenceCue(winrt::Windows::Media::Core::IMediaCue const& other);
+ void StartTime(winrt::Windows::Foundation::TimeSpan const& value);
+ winrt::Windows::Foundation::TimeSpan StartTime();
+ void Duration(winrt::Windows::Foundation::TimeSpan const& value);
+ winrt::Windows::Foundation::TimeSpan Duration();
+ void Id(winrt::hstring const& value);
+ winrt::hstring Id();
+ winrt::Windows::Media::Core::IMediaCue CueRef();
- ReferenceCue(Windows::Media::Core::IMediaCue const& other);
- void StartTime(Windows::Foundation::TimeSpan const& value);
- Windows::Foundation::TimeSpan StartTime();
- void Duration(Windows::Foundation::TimeSpan const& value);
- Windows::Foundation::TimeSpan Duration();
- void Id(hstring const& value);
- hstring Id();
+private:
+ winrt::hstring id{};
- public:
- winrt::hstring id{};
+ winrt::Windows::Foundation::TimeSpan duration{};
- winrt::Windows::Foundation::TimeSpan duration{};
+ winrt::Windows::Foundation::TimeSpan startTime{};
- winrt::Windows::Foundation::TimeSpan startTime{};
-
- winrt::Windows::Media::Core::IMediaCue cueRef = { nullptr };
- };
-}
-namespace winrt::FFmpegInteropX::factory_implementation
-{
- struct ReferenceCue : ReferenceCueT
- {
- };
-}
+ winrt::Windows::Media::Core::IMediaCue cueRef = { nullptr };
+};
diff --git a/Source/StreamBuffer.h b/Source/StreamBuffer.h
new file mode 100644
index 0000000000..38ee69436f
--- /dev/null
+++ b/Source/StreamBuffer.h
@@ -0,0 +1,356 @@
+//*****************************************************************************
+//
+// Copyright 2015 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+//*****************************************************************************
+
+#pragma once
+
+#include "FFmpegReader.h"
+#include "MediaSourceConfig.h"
+#include "MediaSampleProvider.h"
+
+class FFmpegReader;
+
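+// Per-stream FIFO of demuxed AVPackets, shared between the FFmpegReader read-ahead loop (which queues packets)
+// and the owning MediaSampleProvider (which peeks/pops them). All access is serialized by an internal mutex.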
+class StreamBuffer
+{
+public:
+ StreamBuffer(int streamIndex, winrt::FFmpegInteropX::MediaSourceConfig const& config)
+ : config(config)
+ {
+ StreamIndex = streamIndex;
+ }
+
+ int StreamIndex;
+
+ void QueuePacket(AVPacket* packet)
+ {
+ std::lock_guard lock(mutex);
+ buffer.push_back(packet);
+ bufferSize += packet->size;
+ }
+
+ bool ReadUntilNotEmpty(std::shared_ptr<FFmpegReader> reader)
+ {
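+ // Request packets for this stream from the reader until one is queued here, or until ReadPacketForStream() returns a negative result (EOF or read error).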
+ while (IsEmpty())
+ {
+ if (reader->ReadPacketForStream(this) < 0)
+ {
+ DebugMessage(L"GetNextPacket reaching EOF\n");
+ break;
+ }
+ }
+ return !IsEmpty();
+ }
+
+ bool SkipUntilTimestamp(std::shared_ptr<FFmpegReader> reader, LONGLONG target)
+ {
+ bool foundPacket = false;
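+ // Drop leading packets that end before the target timestamp; stop at the first packet that still covers or follows it.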
+
+ while (!foundPacket)
+ {
+ if (ReadUntilNotEmpty(reader))
+ {
+ // peek next packet and check pts value
+ auto packet = PeekPacket();
+
+ auto pts = packet->pts != AV_NOPTS_VALUE ? packet->pts : packet->dts;
+ if (pts != AV_NOPTS_VALUE && packet->duration != AV_NOPTS_VALUE)
+ {
+ auto packetEnd = pts + packet->duration;
+ if (packet->duration > 0 ? packetEnd <= target : packetEnd < target)
+ {
+ DropPackets(1);
+ }
+ else
+ {
+ foundPacket = true;
+ break;
+ }
+ }
+ else
+ {
+ break;
+ }
+ }
+ else
+ {
+ // no more packet found
+ break;
+ }
+ }
+
+ return foundPacket;
+ }
+
+ bool IsEmpty()
+ {
+ std::lock_guard lock(mutex);
+ return buffer.empty();
+ }
+
+ bool IsFull(MediaSampleProvider* sampleProvider)
+ {
+ std::lock_guard lock(mutex);
+ if (buffer.empty())
+ {
+ return false;
+ }
+ auto maxSize = config.ReadAheadBufferSize();
+ auto maxDuration = config.ReadAheadBufferDuration();
+
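+ // The buffer counts as full once its byte total exceeds ReadAheadBufferSize, or once the pts span between the first and last packet exceeds ReadAheadBufferDuration.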
+ bool full = maxSize >= 0 && (long long)bufferSize > maxSize;
+ if (!full && maxDuration.count() >= 0 && buffer.size() > 1)
+ {
+ auto firstPacket = buffer.front();
+ auto lastPacket = buffer.back();
+ auto firstPts = firstPacket->pts != AV_NOPTS_VALUE ? firstPacket->pts : firstPacket->dts;
+ auto lastPts = lastPacket->pts != AV_NOPTS_VALUE ? lastPacket->pts : lastPacket->dts;
+
+ if (firstPts != AV_NOPTS_VALUE && lastPts != AV_NOPTS_VALUE)
+ {
+ auto duration = sampleProvider->ConvertDuration(lastPts - firstPts);
+ full = duration > maxDuration;
+ }
+ }
+ return full;
+ }
+
+ AVPacket* PopPacket()
+ {
+ std::lock_guard lock(mutex);
+
+ AVPacket* packet = NULL;
+ if (!buffer.empty())
+ {
+ packet = buffer.front();
+ buffer.pop_front();
+ bufferSize -= packet->size;
+ }
+
+ return packet;
+ }
+
+ AVPacket* PeekPacket()
+ {
+ std::lock_guard lock(mutex);
+
+ AVPacket* packet = NULL;
+ if (!buffer.empty())
+ {
+ packet = buffer.front();
+ }
+
+ return packet;
+ }
+
+ AVPacket* PeekPacketIndex(int index)
+ {
+ std::lock_guard lock(mutex);
+ return buffer.at(index);
+ }
+
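+ // Find the buffer index of the best packet for the requested pts, or return -1 if the pts lies outside the buffered range.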
+ int TryFindPacketIndex(LONGLONG pts, LONGLONG& resultPts, bool requireKeyFrame, bool fastSeek, bool isForwardSeek)
+ {
+ std::lock_guard lock(mutex);
+
+ if (buffer.size() == 0)
+ {
+ return -1;
+ }
+
+ auto firstPacket = buffer.front();
+ auto lastPacket = buffer.back();
+ auto firstPts = GetTimestamp(firstPacket);
+ auto lastPts = GetTimestamp(lastPacket);
+
+ if (firstPts != AV_NOPTS_VALUE && lastPts != AV_NOPTS_VALUE && (firstPts > pts || lastPts < pts))
+ {
+ return -1;
+ }
+
+ if (requireKeyFrame)
+ {
+ return TryFindClosestKeyframe(pts, isForwardSeek, fastSeek, resultPts);
+ }
+ else
+ {
+ return TryFindClosestPacket(pts);
+ }
+ }
+
+ int TryFindClosestPacket(long long target)
+ {
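+ // Index of the first packet whose end time reaches the target, or -1 if none does.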
+ int index = 0;
+ int result = -1;
+ for (auto packet : buffer)
+ {
+ auto pts = GetTimestamp(packet);
+ if (pts != AV_NOPTS_VALUE)
+ {
+ if (pts + packet->duration >= target)
+ {
+ result = index;
+ break;
+ }
+ }
+ index++;
+ }
+ return result;
+ }
+
+ int TryFindClosestKeyframe(long long target, bool isForwardSeek, bool fastSeek, LONGLONG& resultPts)
+ {
+ bool hasTarget = false;
+ int index = 0;
+ int packetBeforeIndex = -1;
+ int packetAfterIndex = -1;
+ long long packetBeforePts = -1;
+ long long packetAfterPts = -1;
+ long long lastPacketPts = AV_NOPTS_VALUE;
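+ // Single pass over the buffer: remember the last keyframe at or before the target and the first keyframe that reaches it.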
+ for (auto packet : buffer)
+ {
+ auto pts = GetTimestamp(packet);
+ if (pts == AV_NOPTS_VALUE && packet->flags & AV_PKT_FLAG_KEY)
+ {
+ // in some streams, key frames do not have pts/dts. use previous packet value instead.
+ pts = lastPacketPts;
+ }
+ if (pts != AV_NOPTS_VALUE)
+ {
+ if (pts <= target && packet->flags & AV_PKT_FLAG_KEY)
+ {
+ packetBeforeIndex = index;
+ packetBeforePts = pts;
+ }
+
+ if (pts + packet->duration >= target)
+ {
+ hasTarget = true;
+ if (packet->flags & AV_PKT_FLAG_KEY)
+ {
+ packetAfterIndex = index;
+ packetAfterPts = pts;
+ break;
+ }
+ }
+ }
+ lastPacketPts = pts;
+ index++;
+ }
+
+ if (hasTarget)
+ {
+ if (!fastSeek)
+ {
+ // no fast seek: use packet before or decode from current position
+ if (packetBeforeIndex >= 0)
+ {
+ return packetBeforeIndex;
+ }
+ else if (hasTarget)
+ {
+ return 0;
+ }
+ else
+ {
+ return -1;
+ }
+ }
+ else
+ {
+ if (packetBeforeIndex >= 0 && packetAfterIndex >= 0)
+ {
+ // keyframes before and after found. select closest.
+ auto diffBefore = target - packetBeforePts;
+ auto diffAfter = packetAfterPts - target;
+ if (diffBefore <= diffAfter)
+ {
+ resultPts = packetBeforePts;
+ return packetBeforeIndex;
+ }
+ else
+ {
+ resultPts = packetAfterPts;
+ return packetAfterIndex;
+ }
+ }
+ else if (packetBeforeIndex >= 0)
+ {
+ // only keyframe before position found. return it.
+ resultPts = packetBeforePts;
+ return packetBeforeIndex;
+ }
+ else
+ {
+ // only keyframe after position found. use it or continue from current position.
+ auto diffCurrent = target - GetTimestamp(buffer[0]);
+ auto diffAfter = packetAfterPts - target;
+ if (diffCurrent < diffAfter && !isForwardSeek)
+ {
+ return 0;
+ }
+ else
+ {
+ resultPts = packetAfterPts;
+ return packetAfterIndex;
+ }
+ }
+ }
+ }
+ else
+ {
+ // target not found in buffer range
+ return -1;
+ }
+ }
+
+ long long GetTimestamp(AVPacket* packet)
+ {
+ return packet->pts != AV_NOPTS_VALUE ? packet->pts : packet->dts;
+ }
+
+ void Flush()
+ {
+ std::lock_guard lock(mutex);
+ while (!buffer.empty())
+ {
+ auto packet = buffer.front();
+ bufferSize -= packet->size;
+ buffer.pop_front();
+
+ av_packet_free(&packet);
+ }
+ }
+
+ void DropPackets(int count)
+ {
+ std::lock_guard lock(mutex);
+ for (int i = 0; i < count; i++)
+ {
+ auto packet = buffer.front();
+ bufferSize -= packet->size;
+ buffer.pop_front();
+
+ av_packet_free(&packet);
+ }
+ }
+
+private:
+ std::deque<AVPacket*> buffer;
+ std::mutex mutex;
+ size_t bufferSize = 0;
+ winrt::FFmpegInteropX::MediaSourceConfig config{nullptr};
+};
diff --git a/Source/SubtitleProvider.h b/Source/SubtitleProvider.h
index 2d3644eb0a..88a821dbad 100644
--- a/Source/SubtitleProvider.h
+++ b/Source/SubtitleProvider.h
@@ -50,15 +50,13 @@ class SubtitleProvider :
if (!m_config.as()->IsExternalSubtitleParser)
{
- if (timedMetadataKind == TimedMetadataKind::ImageSubtitle)
- {
- SubtitleTrack.CueEntered(weak_handler(this, &SubtitleProvider::OnCueEntered));
- }
SubtitleTrack.TrackFailed(weak_handler(this, &SubtitleProvider::OnTrackFailed));
}
InitializeStreamInfo();
+ m_pAvStream->discard = AVDISCARD_DEFAULT;
+
return S_OK;
}
@@ -83,97 +81,94 @@ class SubtitleProvider :
virtual void QueuePacket(AVPacket* packet) override
{
- if (m_isEnabled)
+ try
{
- try
- {
- TimeSpan position = ConvertPosition(packet->pts);
- TimeSpan duration = ConvertDuration(packet->duration);
+ TimeSpan position = ConvertPosition(packet->pts);
+ TimeSpan duration = ConvertDuration(packet->duration);
- auto cue = CreateCue(packet, &position, &duration);
- if (cue && position.count() >= 0)
+ auto cue = CreateCue(packet, &position, &duration);
+ if (cue && position.count() >= 0)
+ {
+ // apply subtitle delay
+ position += SubtitleDelay;
+ if (position.count() < 0)
{
- // apply subtitle delay
- position += SubtitleDelay;
- if (position.count() < 0)
- {
- negativePositionCues.emplace_back(cue, position.count());
- position = std::chrono::seconds(0);
- }
+ negativePositionCues.emplace_back(cue, position.count());
+ position = std::chrono::seconds(0);
+ }
- // clip previous extended duration cue, if there is one
- if (lastExtendedDurationCue && m_config.PreventModifiedSubtitleDurationOverlap() &&
- lastExtendedDurationCue.StartTime() + lastExtendedDurationCue.Duration() > position)
+ // clip previous extended duration cue, if there is one
+ if (lastExtendedDurationCue && m_config.PreventModifiedSubtitleDurationOverlap() &&
+ lastExtendedDurationCue.StartTime() + lastExtendedDurationCue.Duration() > position)
+ {
+ auto diff = position - (lastExtendedDurationCue.StartTime() + lastExtendedDurationCue.Duration());
+ auto newDuration = lastExtendedDurationCue.Duration() + diff;
+ if (newDuration.count() > 0)
{
- auto diff = position - (lastExtendedDurationCue.StartTime() + lastExtendedDurationCue.Duration());
- auto newDuration = lastExtendedDurationCue.Duration() + diff;
- if (newDuration.count() > 0)
+ lastExtendedDurationCue.Duration() = newDuration;
+ if (!m_config.as().get()->IsExternalSubtitleParser)
{
- lastExtendedDurationCue.Duration() = newDuration;
- if (!m_config.as().get()->IsExternalSubtitleParser)
- {
- pendingChangedDurationCues.push_back(lastExtendedDurationCue);
- }
- }
- else
- {
- // weird subtitle timings, just leave it as is
+ pendingChangedDurationCues.push_back(lastExtendedDurationCue);
}
}
+ else
+ {
+ // weird subtitle timings, just leave it as is
+ }
+ }
- lastExtendedDurationCue = nullptr;
+ lastExtendedDurationCue = nullptr;
- if (duration.count() < 0)
+ if (duration.count() < 0)
+ {
+ duration = TimeSpan(InfiniteDuration);
+ }
+ else
+ {
+ if (m_config.AdditionalSubtitleDuration().count() != 0)
{
- duration = TimeSpan(InfiniteDuration);
+ duration += m_config.AdditionalSubtitleDuration();
+ lastExtendedDurationCue = cue;
}
- else
+ if (duration < m_config.MinimumSubtitleDuration())
{
- if (m_config.AdditionalSubtitleDuration().count() != 0)
- {
- duration += m_config.AdditionalSubtitleDuration();
- lastExtendedDurationCue = cue;
- }
- if (duration < m_config.MinimumSubtitleDuration())
- {
- duration = m_config.MinimumSubtitleDuration();
- lastExtendedDurationCue = cue;
- }
+ duration = m_config.MinimumSubtitleDuration();
+ lastExtendedDurationCue = cue;
}
+ }
+
+ cue.StartTime(position);
+ cue.Duration(duration);
+ AddCue(cue);
- cue.StartTime(position);
- cue.Duration(duration);
- AddCue(cue);
+ if (!m_config.as()->IsExternalSubtitleParser)
+ {
+ isPreviousCueInfiniteDuration = duration.count() >= InfiniteDuration;
+ }
+ else
+ {
+ // fixup infinite duration cues for external subs
+ if (isPreviousCueInfiniteDuration)
+ {
+ infiniteDurationCue.Duration(TimeSpan(cue.StartTime() - infiniteDurationCue.StartTime()));
+ }
- if (!m_config.as()->IsExternalSubtitleParser)
+ if (duration.count() >= InfiniteDuration)
{
- isPreviousCueInfiniteDuration = duration.count() >= InfiniteDuration;
+ isPreviousCueInfiniteDuration = true;
+ infiniteDurationCue = cue;
}
else
{
- // fixup infinite duration cues for external subs
- if (isPreviousCueInfiniteDuration)
- {
- infiniteDurationCue.Duration(TimeSpan(cue.StartTime() - infiniteDurationCue.StartTime()));
- }
-
- if (duration.count() >= InfiniteDuration)
- {
- isPreviousCueInfiniteDuration = true;
- infiniteDurationCue = cue;
- }
- else
- {
- isPreviousCueInfiniteDuration = false;
- infiniteDurationCue = nullptr;
- }
+ isPreviousCueInfiniteDuration = false;
+ infiniteDurationCue = nullptr;
}
}
}
- catch (...)
- {
- OutputDebugString(L"Failed to create subtitle cue.");
- }
+ }
+ catch (...)
+ {
+ OutputDebugString(L"Failed to create subtitle cue.");
}
av_packet_free(&packet);
}
@@ -239,13 +234,14 @@ class SubtitleProvider :
void DispatchCueToTrack(IMediaCue const& cue)
{
- if (m_config.as()->IsExternalSubtitleParser)
+ if (m_config.as()->IsExternalSubtitleParser || !IsEnabled())
{
SubtitleTrack.AddCue(cue);
}
else if (isPreviousCueInfiniteDuration)
{
- pendingRefCues.push_back(ReferenceCue(cue));
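+ // Wrap the real cue in a ReferenceCue; OnCueEntered later adds the wrapped cue to the subtitle track once the reference cue fires.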
+ IMediaCue refCue = winrt::make<ReferenceCue>(cue);
+ pendingRefCues.push_back(refCue);
TriggerUpdateCues();
}
else
@@ -260,14 +256,26 @@ class SubtitleProvider :
UNREFERENCED_PARAMETER(sender);
std::lock_guard lock(mutex);
try {
- //remove all cues from subtitle track
- while (SubtitleTrack.Cues().Size() > 0)
+ //remove all previous cues from subtitle track
+ std::vector<IMediaCue> remove;
+ auto enteredCue = args.Cue();
+ auto startTime = enteredCue.StartTime();
+ for (auto cue : SubtitleTrack.Cues())
{
- SubtitleTrack.RemoveCue(SubtitleTrack.Cues().GetAt(0));
+ if (cue.StartTime() < startTime)
+ {
+ remove.push_back(cue);
+ }
+ }
+
+ for (auto& cue : remove)
+ {
+ SubtitleTrack.RemoveCue(cue);
}
- auto refCue = static_cast(args.Cue());
- SubtitleTrack.AddCue(refCue);
- referenceTrack.RemoveCue(refCue);
+
+ auto refCue = winrt::get_self<ReferenceCue>(enteredCue);
+ SubtitleTrack.AddCue(refCue->CueRef());
+ referenceTrack.RemoveCue(enteredCue);
}
catch (...)
{
@@ -281,10 +289,11 @@ class SubtitleProvider :
try
{
//cleanup old cues to free memory
+ auto startTime = args.Cue().StartTime();
std::vector remove;
for (auto cue : SubtitleTrack.Cues())
{
- if (cue.StartTime() + cue.Duration() < args.Cue().StartTime())
+ if (cue.StartTime() + cue.Duration() < startTime)
{
remove.push_back(cue);
}
@@ -454,8 +463,23 @@ class SubtitleProvider :
OutputDebugString(L"Subtitle track error.");
}
+public:
+
+ void EnableStream() override
+ {
+ DebugMessage(L"EnableStream\n");
+ m_isEnabled = true;
+ }
+
+ void DisableStream() override
+ {
+ DebugMessage(L"DisableStream\n");
+ m_isEnabled = false;
+ }
+
void ClearSubtitles()
{
+ std::lock_guard lock(mutex);
try
{
pendingCues.clear();
@@ -483,12 +507,12 @@ class SubtitleProvider :
public:
- void Flush() override
+ void Flush(bool flushBuffers) override
{
- if (!m_config.as()->IsExternalSubtitleParser)
+ CompressedSampleProvider::Flush(flushBuffers);
+
+ if (!m_config.as()->IsExternalSubtitleParser && flushBuffers)
{
- CompressedSampleProvider::Flush();
-
std::lock_guard lock(mutex);
if (dispatcher)
@@ -502,9 +526,11 @@ class SubtitleProvider :
}
}
+protected:
+ std::recursive_mutex mutex;
+
private:
- std::recursive_mutex mutex;
int cueCount = 0;
std::vector pendingCues;
std::vector pendingRefCues;
diff --git a/Source/SubtitleProviderBitmap.h b/Source/SubtitleProviderBitmap.h
index 80bc142aaf..f5e2433b1c 100644
--- a/Source/SubtitleProviderBitmap.h
+++ b/Source/SubtitleProviderBitmap.h
@@ -3,7 +3,6 @@
#include "SubtitleProvider.h"
#include
-
using namespace winrt::Windows::Graphics::Imaging;
using namespace winrt::Windows::Media::Core;
@@ -14,13 +13,26 @@ class SubtitleProviderBitmap : public SubtitleProvider
SubtitleProviderBitmap(std::shared_ptr reader,
AVFormatContext* avFormatCtx,
AVCodecContext* avCodecCtx,
- winrt::FFmpegInteropX::MediaSourceConfig const& config,
+ MediaSourceConfig const& config,
int index,
winrt::Windows::System::DispatcherQueue const& dispatcher)
: SubtitleProvider(reader, avFormatCtx, avCodecCtx, config, index, TimedMetadataKind::ImageSubtitle, dispatcher)
{
}
+ virtual HRESULT Initialize() override
+ {
+ auto hr = SubtitleProvider::Initialize();
+
+ if (SUCCEEDED(hr))
+ {
+ SubtitleTrack.CueEntered(weak_handler(this, &SubtitleProviderBitmap::OnCueEntered));
+ SubtitleTrack.CueExited(weak_handler(this, &SubtitleProviderBitmap::OnCueExited));
+ }
+
+ return hr;
+ }
+
virtual void NotifyVideoFrameSize(int width, int height, double aspectRatio) override
{
videoWidth = width;
@@ -37,103 +49,192 @@ class SubtitleProviderBitmap : public SubtitleProvider
virtual IMediaCue CreateCue(AVPacket* packet, TimeSpan* position, TimeSpan* duration) override
{
- // only decode image subtitles if the stream is selected
- if (!IsEnabled())
+ AVSubtitle* subtitle = (AVSubtitle*)av_mallocz(sizeof(AVSubtitle));
+ if (!subtitle)
{
return nullptr;
}
- AVSubtitle subtitle;
int gotSubtitle = 0;
- auto result = avcodec_decode_subtitle2(m_pAvCodecCtx, &subtitle, &gotSubtitle, packet);
+ auto result = avcodec_decode_subtitle2(m_pAvCodecCtx, subtitle, &gotSubtitle, packet);
if (result > 0 && gotSubtitle)
{
- if (subtitle.start_display_time > 0)
+ if (subtitle->start_display_time > 0)
{
- *position = TimeSpan(position->count() + (long long)10000 * subtitle.start_display_time);
+ *position = TimeSpan{ position->count() + (long long)10000 * subtitle->start_display_time };
}
- *duration = TimeSpan((long long)10000 * subtitle.end_display_time);
+ *duration = TimeSpan{ (long long)10000 * subtitle->end_display_time };
- if (subtitle.num_rects <= 0)
+ if (subtitle->num_rects <= 0)
{
- if (!dummyBitmap)
+ // insert dummy cue
+ ImageCue cue;
+ cue.SoftwareBitmap(GetDummyBitmap());
+ avsubtitle_free(subtitle);
+ av_freep(&subtitle);
+
+ return cue;
+ }
+ else
+ {
+ int width, height, offsetX, offsetY;
+ TimedTextSize cueSize;
+ TimedTextPoint cuePosition;
+ if (subtitle->num_rects > 0 && CheckSize(subtitle, width, height, offsetX, offsetY, cueSize, cuePosition))
{
- dummyBitmap = SoftwareBitmap(BitmapPixelFormat::Bgra8, 16, 16, BitmapAlphaMode::Premultiplied);
+ auto id = winrt::to_hstring(nextId++);
+ map[id] = subtitle;
+
+ ImageCue cue;
+ cue.Id(id);
+ return cue;
}
+ else if (subtitle->num_rects > 0)
+ {
+ avsubtitle_free(subtitle);
+ av_freep(&subtitle);
+ OutputDebugString(L"Error: Invalid subtitle size received.");
+ }
+ }
+ }
+ else if (result <= 0)
+ {
+ avsubtitle_free(subtitle);
+ av_freep(&subtitle);
+ OutputDebugString(L"Failed to decode subtitle.");
+ }
+ return nullptr;
+ }
- // inserty dummy cue
- ImageCue cue = ImageCue();
- cue.SoftwareBitmap(dummyBitmap);
- avsubtitle_free(&subtitle);
+public:
- return cue;
+ void Flush(bool flushBuffers) override
+ {
+ SubtitleProvider::Flush(flushBuffers);
+
+ if (!m_config.as()->IsExternalSubtitleParser && flushBuffers)
+ {
+ for (auto entry : map)
+ {
+ auto subtitle = entry.second;
+ avsubtitle_free(subtitle);
+ av_freep(&subtitle);
}
+ map.clear();
+ }
+ }
- int width, height, offsetX, offsetY;
- TimedTextSize cueSize;
- TimedTextPoint cuePosition;
- if (subtitle.num_rects > 0 && CheckSize(subtitle, width, height, offsetX, offsetY, cueSize, cuePosition))
+private:
+
+ void OnCueEntered(TimedMetadataTrack sender, MediaCueEventArgs args)
+ {
+ std::lock_guard lock(mutex);
+ try
+ {
+ //cleanup old cues to free memory
+ std::vector<IMediaCue> remove;
+ for (auto cue : SubtitleTrack.Cues())
{
- auto bitmap = SoftwareBitmap(BitmapPixelFormat::Bgra8, width, height, BitmapAlphaMode::Straight);
+ if (cue.StartTime() + cue.Duration() < args.Cue().StartTime())
{
- auto buffer = bitmap.LockBuffer(BitmapBufferAccessMode::Write);
- auto reference = buffer.CreateReference();
- BYTE* pixels = reference.data();
+ remove.push_back(cue);
+ }
+ }
+
+ for (auto& cue : remove)
+ {
+ SubtitleTrack.RemoveCue(cue);
+ }
- auto plane = buffer.GetPlaneDescription(0);
+ auto cue = args.Cue().try_as<ImageCue>();
+ if (cue && !cue.Id().empty())
+ {
+ auto subtitle = map.at(cue.Id());
- for (unsigned int i = 0; i < subtitle.num_rects; i++)
+ int width, height, offsetX, offsetY;
+ TimedTextSize cueSize;
+ TimedTextPoint cuePosition;
+ if (subtitle->num_rects > 0 && CheckSize(subtitle, width, height, offsetX, offsetY, cueSize, cuePosition))
+ {
+ auto bitmap = SoftwareBitmap(BitmapPixelFormat::Bgra8, width, height, BitmapAlphaMode::Straight);
{
- auto rect = subtitle.rects[i];
+ auto buffer = bitmap.LockBuffer(BitmapBufferAccessMode::Write);
+ auto reference = buffer.CreateReference();
+ BYTE* pixels = reference.data();
+
+ auto plane = buffer.GetPlaneDescription(0);
- for (int y = 0; y < rect->h; y++)
+ for (unsigned int i = 0; i < subtitle->num_rects; i++)
{
- for (int x = 0; x < rect->w; x++)
+ auto rect = subtitle->rects[i];
+
+ for (int y = 0; y < rect->h; y++)
{
- auto inPointer = rect->data[0] + y * rect->linesize[0] + x;
- auto color = inPointer[0];
- if (color < rect->nb_colors)
+ for (int x = 0; x < rect->w; x++)
{
- auto rgba = ((uint32_t*)rect->data[1])[color];
- auto outPointer = pixels + plane.StartIndex + plane.Stride * ((y + rect->y) - offsetY) + 4 * ((x + rect->x) - offsetX);
- ((uint32_t*)outPointer)[0] = rgba;
- }
- else
- {
- OutputDebugString(L"Error: Illegal subtitle color.");
+ auto inPointer = rect->data[0] + y * rect->linesize[0] + x;
+ auto color = inPointer[0];
+ if (color < rect->nb_colors)
+ {
+ auto rgba = ((uint32_t*)rect->data[1])[color];
+ auto outPointer = pixels + plane.StartIndex + plane.Stride * ((y + rect->y) - offsetY) + 4 * ((x + rect->x) - offsetX);
+ ((uint32_t*)outPointer)[0] = rgba;
+ }
+ else
+ {
+ OutputDebugString(L"Error: Illegal subtitle color.");
+ }
}
}
}
}
- }
-
- ImageCue cue = ImageCue();
- cue.SoftwareBitmap(SoftwareBitmap::Convert(bitmap, BitmapPixelFormat::Bgra8, BitmapAlphaMode::Premultiplied));
- cue.Position(cuePosition);
- cue.Extent(cueSize);
- avsubtitle_free(&subtitle);
-
- return cue;
+ auto converted = SoftwareBitmap::Convert(bitmap, BitmapPixelFormat::Bgra8, BitmapAlphaMode::Premultiplied);
+ cue.SoftwareBitmap(converted);
+ cue.Position(cuePosition);
+ cue.Extent(cueSize);
+ }
+ else if (subtitle->num_rects > 0)
+ {
+ OutputDebugString(L"Error: Invalid subtitle size received.");
+ }
}
- else if (subtitle.num_rects > 0)
+ }
+ catch (...)
+ {
+ OutputDebugString(L"Failed to render cue.");
+ }
+ }
+
+ void OnCueExited(TimedMetadataTrack sender, MediaCueEventArgs args)
+ {
+ std::lock_guard lock(mutex);
+ try
+ {
+ auto cue = args.Cue().try_as<ImageCue>();
+ if (cue && !cue.Id().empty())
{
- OutputDebugString(L"Error: Invalid subtitle size received.");
+ cue.SoftwareBitmap(GetDummyBitmap());
}
-
- avsubtitle_free(&subtitle);
}
- else if (result <= 0)
+ catch (...)
{
- OutputDebugString(L"Failed to decode subtitle.");
+ OutputDebugString(L"Failed to cleanup old cues.");
}
-
- return nullptr;
}
-private:
+ SoftwareBitmap GetDummyBitmap()
+ {
+ if (!dummyBitmap)
+ {
+ dummyBitmap = SoftwareBitmap(BitmapPixelFormat::Bgra8, 16, 16, BitmapAlphaMode::Premultiplied);
+ }
+
+ return dummyBitmap;
+ }
- bool CheckSize(AVSubtitle& subtitle, int& width, int& height, int& offsetX, int& offsetY, TimedTextSize& cueSize, TimedTextPoint& cuePosition)
+ bool CheckSize(AVSubtitle* subtitle, int& width, int& height, int& offsetX, int& offsetY, TimedTextSize& cueSize, TimedTextPoint& cuePosition)
{
if (!GetInitialSize())
{
@@ -142,9 +243,9 @@ class SubtitleProviderBitmap : public SubtitleProvider
// get actual extent of subtitle rects
int minX = subtitleWidth, minY = subtitleHeight, maxW = 0, maxH = 0;
- for (unsigned int i = 0; i < subtitle.num_rects; i++)
+ for (unsigned int i = 0; i < subtitle->num_rects; i++)
{
- auto rect = subtitle.rects[i];
+ auto rect = subtitle->rects[i];
minX = min(minX, rect->x);
minY = min(minY, rect->y);
maxW = max(maxW, rect->x + rect->w);
@@ -179,12 +280,12 @@ class SubtitleProviderBitmap : public SubtitleProvider
}
}
- cueSize.Unit = TimedTextUnit::Percentage;
+ cueSize.Unit = TimedTextUnit::Percentage;
cueSize.Width = (double)width * 100 / subtitleWidth;
cueSize.Height = (double)height * 100 / targetHeight;
// for some reason, all bitmap cues are moved down by 5% by uwp. we need to compensate for that.
- cuePosition.Unit = TimedTextUnit::Percentage;
+ cuePosition.Unit = TimedTextUnit::Percentage;
cuePosition.X = (double)offsetX * 100 / subtitleWidth;
cuePosition.Y = ((double)(offsetY - heightOffset) * 100 / targetHeight) - 5;
@@ -225,5 +326,6 @@ class SubtitleProviderBitmap : public SubtitleProvider
int subtitleHeight = 0;
int optimalHeight = 0;
SoftwareBitmap dummyBitmap = { nullptr };
-
+ std::map<winrt::hstring, AVSubtitle*> map;
+ int nextId = 0;
};
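The OnCueEntered rendering loop above expands palette-indexed subtitle rects (rect->data[0] holds the indices, rect->data[1] the BGRA palette) into a 32-bit bitmap. A stand-alone sketch of that conversion on plain buffers, without SoftwareBitmap (struct and function names are illustrative, not the project's):

```cpp
#include <cstdint>
#include <cstring>

// Illustrative stand-in for one AVSubtitleRect.
struct SubtitleRect
{
    int x = 0, y = 0, w = 0, h = 0;
    int linesize = 0;                  // bytes per source row
    int numColors = 0;                 // palette size
    const uint8_t* indices = nullptr;  // palette indices, row stride = linesize
    const uint32_t* palette = nullptr; // numColors 32-bit BGRA entries
};

// Expand one indexed rect into a 32-bit target image with 'targetStride'
// bytes per row; offsetX/offsetY position the rect inside the target.
bool BlitRect(const SubtitleRect& rect,
              uint8_t* target, int targetStride,
              int offsetX, int offsetY)
{
    for (int y = 0; y < rect.h; y++)
    {
        for (int x = 0; x < rect.w; x++)
        {
            uint8_t index = rect.indices[y * rect.linesize + x];
            if (index >= rect.numColors)
            {
                return false; // illegal palette index
            }
            uint32_t bgra = rect.palette[index];
            uint8_t* out = target
                + targetStride * (y + rect.y - offsetY)
                + 4 * (x + rect.x - offsetX);
            std::memcpy(out, &bgra, sizeof(bgra));
        }
    }
    return true;
}
```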
diff --git a/Source/UncompressedSampleProvider.cpp b/Source/UncompressedSampleProvider.cpp
index 71edbffd28..c5c64aae41 100644
--- a/Source/UncompressedSampleProvider.cpp
+++ b/Source/UncompressedSampleProvider.cpp
@@ -199,9 +199,9 @@ HRESULT UncompressedSampleProvider::FeedPacketToDecoder(int64_t& firstPacketPos)
return hr;
}
-void UncompressedSampleProvider::Flush()
+void UncompressedSampleProvider::Flush(bool flushBuffers)
{
- MediaSampleProvider::Flush();
+ MediaSampleProvider::Flush(flushBuffers);
// after seek we need to get first packet pts again
hasNextFramePts = false;
diff --git a/Source/UncompressedSampleProvider.h b/Source/UncompressedSampleProvider.h
index 6f2d53a0e3..9231f4562d 100644
--- a/Source/UncompressedSampleProvider.h
+++ b/Source/UncompressedSampleProvider.h
@@ -59,7 +59,7 @@ class UncompressedSampleProvider abstract : public MediaSampleProvider
public:
- virtual void Flush() override;
+ virtual void Flush(bool flushBuffers) override;
private:
INT64 nextFramePts = 0;
diff --git a/Source/UncompressedVideoSampleProvider.h b/Source/UncompressedVideoSampleProvider.h
index d0315593e4..a001d7b69e 100644
--- a/Source/UncompressedVideoSampleProvider.h
+++ b/Source/UncompressedVideoSampleProvider.h
@@ -47,10 +47,10 @@ class UncompressedVideoSampleProvider : public UncompressedSampleProvider
{
public:
virtual ~UncompressedVideoSampleProvider();
- virtual void Flush() override
+ virtual void Flush(bool flushBuffers) override
{
hasFirstInterlacedFrame = false;
- UncompressedSampleProvider::Flush();
+ UncompressedSampleProvider::Flush(flushBuffers);
}
UncompressedVideoSampleProvider(
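The Flush(bool flushBuffers) signature change is threaded through the whole provider hierarchy: each override resets its own per-stream state and forwards the flag to its base class, which decides whether buffered packets are actually discarded. A minimal sketch of that override chain with simplified stand-in classes:

```cpp
#include <iostream>

// Stand-in for the sample provider base class.
class SampleProviderBase
{
public:
    virtual ~SampleProviderBase() = default;

    virtual void Flush(bool flushBuffers)
    {
        if (flushBuffers)
        {
            std::cout << "base: dropping buffered packets\n";
        }
    }
};

// Stand-in for a derived provider that keeps extra per-stream state.
class VideoSampleProvider : public SampleProviderBase
{
public:
    void Flush(bool flushBuffers) override
    {
        // Reset local state first, then let the base class decide whether
        // the packet buffers are discarded.
        hasFirstInterlacedFrame = false;
        SampleProviderBase::Flush(flushBuffers);
    }

private:
    bool hasFirstInterlacedFrame = false;
};

int main()
{
    VideoSampleProvider provider;
    provider.Flush(true);  // seek: drop buffers and reset state
    provider.Flush(false); // lightweight flush: keep buffers
}
```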
diff --git a/Source/VideoStreamInfo.h b/Source/VideoStreamInfo.h
index ac30885d88..0e670d79b3 100644
--- a/Source/VideoStreamInfo.h
+++ b/Source/VideoStreamInfo.h
@@ -50,10 +50,9 @@ namespace winrt::FFmpegInteropX::implementation
FFmpegInteropX::HardwareDecoderStatus hardwareDecoderStatus;
FFmpegInteropX::DecoderEngine decoderEngine;
- bool SetDefault()
+ void SetDefault()
{
isDefault = true;
- return isDefault;
}
};
}
diff --git a/Source/cpp.hint b/Source/cpp.hint
index 939b6c8030..a0b14a9226 100644
--- a/Source/cpp.hint
+++ b/Source/cpp.hint
@@ -1,4 +1,5 @@
// Hint files help the Visual Studio IDE interpret Visual C++ identifiers
// such as names of functions and macros.
// For more information see https://go.microsoft.com/fwlink/?linkid=865984
-#define InspectableClass(runtimeClassName, trustLevel) public: static _Null_terminated_ const wchar_t* STDMETHODCALLTYPE InternalGetRuntimeClassName() throw() { static_assert((RuntimeClassT::ClassFlags::value & ::Microsoft::WRL::WinRtClassicComMix) == ::Microsoft::WRL::WinRt || (RuntimeClassT::ClassFlags::value & ::Microsoft::WRL::WinRtClassicComMix) == ::Microsoft::WRL::WinRtClassicComMix, "'InspectableClass' macro must not be used with ClassicCom clasess."); static_assert(__is_base_of(::Microsoft::WRL::Details::RuntimeClassBase, RuntimeClassT), "'InspectableClass' macro can only be used with ::Windows::WRL::RuntimeClass types"); static_assert(!__is_base_of(IActivationFactory, RuntimeClassT), "Incorrect usage of IActivationFactory interface. Make sure that your RuntimeClass doesn't implement IActivationFactory interface use ::Windows::WRL::ActivationFactory instead or 'InspectableClass' macro is not used on ::Windows::WRL::ActivationFactory"); return runtimeClassName; } static ::TrustLevel STDMETHODCALLTYPE InternalGetTrustLevel() throw() { return trustLevel; } STDMETHOD(GetRuntimeClassName)(_Out_ HSTRING* runtimeName) override { *runtimeName = nullptr; HRESULT hr = S_OK; auto name = InternalGetRuntimeClassName(); if (name != nullptr) { hr = ::WindowsCreateString(name, static_cast(::wcslen(name)), runtimeName); } return hr; } STDMETHOD(GetTrustLevel)(_Out_ ::TrustLevel* trustLvl) override { *trustLvl = trustLevel; return S_OK; } STDMETHOD(GetIids)(_Out_ ULONG *iidCount, _When_(*iidCount == 0, _At_(*iids, _Post_null_)) _When_(*iidCount > 0, _At_(*iids, _Post_notnull_)) _Result_nullonfailure_ IID **iids) override { return RuntimeClassT::GetIids(iidCount, iids); } STDMETHOD(QueryInterface)(REFIID riid, _Outptr_result_nullonfailure_ void **ppvObject) override { bool handled = false; HRESULT hr = this->CustomQueryInterface(riid, ppvObject, &handled); if (FAILED(hr) || handled) return hr; return RuntimeClassT::QueryInterface(riid, ppvObject); } STDMETHOD_(ULONG, Release)() override { return RuntimeClassT::Release(); } STDMETHOD_(ULONG, AddRef)() override { return RuntimeClassT::AddRef(); } private:
+#define PROPERTY(name, type, defaultValue) public: type name() { return _##name; } void name(type value) { _##name = value; } private: type _##name = defaultValue;
+#define PROPERTY_CONST(name, type, defaultValue) public: type name() { return _##name; } void name(type const& value) { _##name = value; } private: type _##name = defaultValue;
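The new hint-file entries only teach IntelliSense how to parse the PROPERTY macros; the real definitions live in the project's headers. A small sketch of how such a macro is typically used, with illustrative property names that are not necessarily ones the project defines:

```cpp
#include <iostream>

// Same shape as the PROPERTY macro from the hint file: a getter, a setter
// with the same name, and a backing field with a default value.
#define PROPERTY(name, type, defaultValue) \
    public: type name() { return _##name; } \
    void name(type value) { _##name = value; } \
    private: type _##name = defaultValue;

class StreamOptions
{
    PROPERTY(MaxVideoThreads, int, 0)
    PROPERTY(SkipErrors, bool, true)
};

int main()
{
    StreamOptions options;
    options.MaxVideoThreads(8);
    std::cout << options.MaxVideoThreads() << "\n"; // prints 8
}
```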
diff --git a/Source/pch.h b/Source/pch.h
index 83e6999669..8fffb48d80 100644
--- a/Source/pch.h
+++ b/Source/pch.h
@@ -97,10 +97,10 @@ std::vector inline to_vector(IVectorView input)
template
std::function inline weak_handler(T* instance, void(T::* instanceMethod)(TSender, TArgs))
{
- std::weak_ptr wr = instance->weak_from_this();
+ auto wr = instance->weak_from_this();
auto handler = [wr, instanceMethod](TSender sender, TArgs args)
{
- auto instanceLocked = wr.lock();
+ auto instanceLocked = std::dynamic_pointer_cast<T>(wr.lock());
if (instanceLocked)
{
(instanceLocked.get()->*instanceMethod)(sender, args);
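The weak_handler change locks the captured weak_ptr and then casts back to the concrete type with std::dynamic_pointer_cast, presumably because enable_shared_from_this is provided by a base class, so weak_from_this() yields a weak_ptr to that base. A stand-alone sketch of the pattern with illustrative class names:

```cpp
#include <functional>
#include <iostream>
#include <memory>

// Stand-in base that owns enable_shared_from_this for the whole hierarchy.
class ProviderBase : public std::enable_shared_from_this<ProviderBase>
{
public:
    virtual ~ProviderBase() = default;
};

// Build an event handler that holds only a weak reference to the target.
template <class T, class TSender, class TArgs>
std::function<void(TSender, TArgs)> weak_handler(T* instance,
                                                 void (T::*method)(TSender, TArgs))
{
    auto weak = instance->weak_from_this(); // weak_ptr<ProviderBase>
    return [weak, method](TSender sender, TArgs args)
    {
        // Lock and cast back to the derived type; if the target is gone,
        // the handler silently does nothing instead of touching freed memory.
        if (auto locked = std::dynamic_pointer_cast<T>(weak.lock()))
        {
            (locked.get()->*method)(sender, args);
        }
    };
}

class SubtitleHandler : public ProviderBase
{
public:
    void OnCueEntered(int /*sender*/, int cueId)
    {
        std::cout << "cue entered: " << cueId << "\n";
    }
};

int main()
{
    auto provider = std::make_shared<SubtitleHandler>();
    auto handler = weak_handler(provider.get(), &SubtitleHandler::OnCueEntered);

    handler(0, 42); // dispatched to the live provider
    provider.reset();
    handler(0, 43); // provider destroyed: handler is a no-op
}
```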