Skip to content

Commit

Permalink
- Subtitles: Adds Bitmap support with alpha blend (mainly for dvd/blu-ray) [Fixes #460]

Browse files Browse the repository at this point in the history

- Player: Latency improvements

- Renderer: Alpha blend improvements

- Renderer: Exposing Config.Video.MaxFrameLatency to manually set it if required
  • Loading branch information
SuRGeoNix committed Jul 16, 2024
1 parent fb8f9d3 commit 9a47e74
Show file tree
Hide file tree
Showing 18 changed files with 310 additions and 86 deletions.
5 changes: 5 additions & 0 deletions FlyleafLib/Engine/Config.cs
Original file line number Diff line number Diff line change
Expand Up @@ -622,6 +622,11 @@ public System.Windows.Media.Color
/// </summary>
public double MaxOutputFps { get; set; } = 60;

/// <summary>
/// DXGI Maximum Frame Latency (1 - 16)
/// </summary>
/// <remarks>
/// Applied to IDXGIDevice1.MaximumFrameLatency during renderer initialization.
/// Default is 1 (lowest latency). The value is not validated here; DXGI itself
/// accepts 1-16 — NOTE(review): out-of-range values will surface at device setup, confirm desired handling.
/// </remarks>
public int MaxFrameLatency { get; set; } = 1;

/// <summary>
/// The max resolution that the current system can achieve and will be used from the input/stream suggester plugins
/// </summary>
Expand Down
11 changes: 11 additions & 0 deletions FlyleafLib/MediaFramework/MediaContext/DecoderContext.Open.cs
Original file line number Diff line number Diff line change
Expand Up @@ -861,7 +861,18 @@ public void OpenSuggestedSubtitles()

// 4. Prevent Local/Online Search for 'small' duration videos
if (VideoDemuxer.Duration < TimeSpan.FromMinutes(25).Ticks)
{
// 6. (Any) Check embedded/external streams for config languages (including 'undefined')
SuggestSubtitles(out var stream, out var extStream);

if (stream != null)
Open(stream);
else if (extStream != null)
Open(extStream);

return;
}


} catch (Exception e)
{
Expand Down
104 changes: 60 additions & 44 deletions FlyleafLib/MediaFramework/MediaDecoder/SubtitlesDecoder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ public SubtitlesDecoder(Config config, int uniqueId = -1) : base(config, uniqueI
protected override unsafe int Setup(AVCodec* codec) => 0;

protected override void DisposeInternal()
=> Frames = new ConcurrentQueue<SubtitlesFrame>();
=> DisposeFrames();

public void Flush()
{
Expand Down Expand Up @@ -121,9 +121,13 @@ protected override void RunInternal()
{
if (Status == Status.Stopped || demuxer.SubtitlesPackets.Count == 0) continue;
packet = demuxer.SubtitlesPackets.Dequeue();

int gotFrame = 0;
AVSubtitle sub = new();
ret = avcodec_decode_subtitle2(codecCtx, &sub, &gotFrame, packet);
SubtitlesFrame subFrame = new();

fixed(AVSubtitle* subPtr = &subFrame.sub)
ret = avcodec_decode_subtitle2(codecCtx, subPtr, &gotFrame, packet);

if (ret < 0)
{
allowedErrors--;
Expand All @@ -133,9 +137,12 @@ protected override void RunInternal()

continue;
}

if (gotFrame < 1 || sub.num_rects < 1 ) continue;
if (packet->pts == AV_NOPTS_VALUE) { avsubtitle_free(&sub); av_packet_free(&packet); continue; }

if (gotFrame < 1 || packet->pts == AV_NOPTS_VALUE)
{
av_packet_free(&packet);
continue;
}

// TODO: CodecChanged? And when findstreaminfo is disabled as it is an external demuxer will not know the main demuxer's start time
if (!filledFromCodec)
Expand All @@ -147,54 +154,63 @@ protected override void RunInternal()
CodecChanged?.Invoke(this);
}

var mFrame = ProcessSubtitlesFrame(packet, &sub);
if (mFrame != null) Frames.Enqueue(mFrame);
if (subFrame.sub.num_rects < 1)
{
if (SubtitlesStream.IsBitmap) // clear prev subs frame
{
subFrame.duration = subFrame.sub.end_display_time;
subFrame.timestamp = (long)(packet->pts * SubtitlesStream.Timebase) + (subFrame.sub.start_display_time * 10000) - demuxer.StartTime + Config.Subtitles.Delay;
Frames.Enqueue(subFrame);
}

fixed(AVSubtitle* subPtr = &subFrame.sub)
avsubtitle_free(subPtr);

av_packet_free(&packet);
continue;
}

subFrame.duration = subFrame.sub.end_display_time;
subFrame.timestamp = (long)(packet->pts * SubtitlesStream.Timebase) + (subFrame.sub.start_display_time * 10000) - demuxer.StartTime + Config.Subtitles.Delay;

if (subFrame.sub.rects[0]->type == AVSubtitleType.SUBTITLE_ASS ||
subFrame.sub.rects[0]->type == AVSubtitleType.SUBTITLE_TEXT)
{
subFrame.text = Utils.BytePtrToStringUTF8(subFrame.sub.rects[0]->ass);
Config.Subtitles.Parser(subFrame);

fixed(AVSubtitle* subPtr = &subFrame.sub)
avsubtitle_free(subPtr);
}

if (CanTrace) Log.Trace($"Processes {Utils.TicksToTime(subFrame.timestamp)}");

Frames.Enqueue(subFrame);

avsubtitle_free(&sub);
av_packet_free(&packet);
}
} while (Status == Status.Running);
}

private SubtitlesFrame ProcessSubtitlesFrame(AVPacket* packet, AVSubtitle* sub)
/// <summary>
/// Releases the native FFmpeg data held by a subtitles frame.
/// Frames without rects own no native allocations and are left untouched.
/// </summary>
/// <param name="frame">The frame whose embedded AVSubtitle should be freed.</param>
public static void DisposeFrame(SubtitlesFrame frame)
{
    // Nothing to free when the subtitle carries no rects.
    if (frame.sub.num_rects < 1)
        return;

    // Pin the embedded struct so the native free call gets a stable pointer.
    fixed (AVSubtitle* subPtr = &frame.sub)
        avsubtitle_free(subPtr);
}

try
public void DisposeFrames()
{
if (!SubtitlesStream.IsBitmap)
Frames = new ConcurrentQueue<SubtitlesFrame>();
else
{
string line = "";
byte[] buffer;
var rects = sub->rects;
var cur = rects[0];

switch (cur->type)
while (!Frames.IsEmpty)
{
case AVSubtitleType.SUBTITLE_ASS:
case AVSubtitleType.SUBTITLE_TEXT:
buffer = new byte[1024];
line = Utils.BytePtrToStringUTF8(cur->ass);
break;

//case AVSubtitleType.SUBTITLE_BITMAP:
//Log("Subtitles BITMAP -> Not Implemented yet");

default:
return null;
Frames.TryDequeue(out var frame);
DisposeFrame(frame);
}

SubtitlesFrame mFrame = new(line)
{
duration = (int)(sub->end_display_time - sub->start_display_time),
timestamp = (long)(packet->pts * SubtitlesStream.Timebase) - demuxer.StartTime + Config.Subtitles.Delay
};

if (CanTrace) Log.Trace($"Processes {Utils.TicksToTime(mFrame.timestamp)}");

Config.Subtitles.Parser(mFrame);

return mFrame;
} catch (Exception e) { Log.Error($"Failed to process frame ({e.Message})"); return null; }
}

}

public void DisposeFrames()
=> Frames = new ConcurrentQueue<SubtitlesFrame>();
}
14 changes: 14 additions & 0 deletions FlyleafLib/MediaFramework/MediaDemuxer/Demuxer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -498,6 +498,20 @@ public string Open(string url, Stream stream)
// Find Streams Info
if (Config.AllowFindStreamInfo)
{
// TBR: Tested and even if it requires more analyze duration it does not actually use it
bool requiresMoreAnalyse = false;
for (int i = 0; i < fmtCtx->nb_streams; i++)
if (fmtCtx->streams[i]->codecpar->codec_id == AVCodecID.AV_CODEC_ID_HDMV_PGS_SUBTITLE ||
fmtCtx->streams[i]->codecpar->codec_id == AVCodecID.AV_CODEC_ID_DVD_SUBTITLE
)
{ requiresMoreAnalyse = true; break; }

if (requiresMoreAnalyse)
{
fmtCtx->probesize = Math.Max(fmtCtx->probesize, 5000 * (long)1024 * 1024); // Bytes
fmtCtx->max_analyze_duration = Math.Max(fmtCtx->max_analyze_duration, 1000 * (long)1000 * 1000); // Mcs
}

ret = avformat_find_stream_info(fmtCtx, null);
if (ret == AVERROR_EXIT || Status != Status.Opening || Interrupter.ForceInterrupt == 1) return error = "Cancelled";
if (ret < 0) return error = $"[avformat_find_stream_info] {FFmpegEngine.ErrorCodeToMsg(ret)} ({ret})";
Expand Down
11 changes: 5 additions & 6 deletions FlyleafLib/MediaFramework/MediaFrame/SubtitlesFrame.cs
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
using System.Collections.Generic;
using FFmpeg.AutoGen;
using System.Collections.Generic;
using System.Drawing;
using System.Globalization;

namespace FlyleafLib.MediaFramework.MediaFrame;

public class SubtitlesFrame : FrameBase
public unsafe class SubtitlesFrame : FrameBase
{
public int duration;
public uint duration;
public string text;
public List<SubStyle> subStyles;

public SubtitlesFrame(string text)
=> this.text = text;
public AVSubtitle sub;
}

public struct SubStyle
Expand Down
39 changes: 27 additions & 12 deletions FlyleafLib/MediaFramework/MediaRenderer/Renderer.Device.cs
Original file line number Diff line number Diff line change
Expand Up @@ -62,15 +62,32 @@ public unsafe partial class Renderer
FeatureLevel.Level_9_1
};

// Shared blend description; used to create blendStateAlpha during Initialize().
static BlendDescription blendDesc = new();

// Configured once per process in the static constructor since the
// description never changes between renderer instances.
static Renderer()
{
// Color channels: classic "over" alpha blending —
// out.rgb = src.rgb * src.a + dst.rgb * (1 - src.a).
blendDesc.RenderTarget[0].BlendEnable = true;
blendDesc.RenderTarget[0].SourceBlend = Blend.SourceAlpha;
blendDesc.RenderTarget[0].DestinationBlend = Blend.InverseSourceAlpha;
blendDesc.RenderTarget[0].BlendOperation = BlendOperation.Add;
// Alpha channel: both factors are Zero, so the written alpha ends up 0
// (target alpha is not preserved) — NOTE(review): confirm this is intended
// for the overlay/RGBA use cases.
blendDesc.RenderTarget[0].SourceBlendAlpha = Blend.Zero;
blendDesc.RenderTarget[0].DestinationBlendAlpha = Blend.Zero;
blendDesc.RenderTarget[0].BlendOperationAlpha = BlendOperation.Add;
blendDesc.RenderTarget[0].RenderTargetWriteMask = ColorWriteEnable.All;
}


ID3D11DeviceContext context;

ID3D11Buffer vertexBuffer;
ID3D11InputLayout inputLayout;
ID3D11RasterizerState rasterizerState;
ID3D11RasterizerState
rasterizerState;
ID3D11BlendState blendStateAlpha;

ID3D11VertexShader ShaderVS;
ID3D11PixelShader ShaderPS;
ID3D11PixelShader ShaderBGRA;

ID3D11Buffer psBuffer;
PSBufferType psBufferData;
Expand Down Expand Up @@ -180,7 +197,7 @@ public void Initialize(bool swapChain = true)
adapter.Dispose();

using (var mthread = Device.QueryInterface<ID3D11Multithread>()) mthread.SetMultithreadProtected(true);
using (var dxgidevice = Device.QueryInterface<IDXGIDevice1>()) dxgidevice.MaximumFrameLatency = 1;
using (var dxgidevice = Device.QueryInterface<IDXGIDevice1>()) dxgidevice.MaximumFrameLatency = Config.Video.MaxFrameLatency;

// Input Layout
inputLayout = Device.CreateInputLayout(inputElements, ShaderCompiler.VSBlob);
Expand Down Expand Up @@ -218,17 +235,11 @@ public void Initialize(bool swapChain = true)
context.PSSetConstantBuffer(0, psBuffer);
psBufferData.hdrmethod = HDRtoSDRMethod.None;
context.UpdateSubresource(psBufferData, psBuffer);

// subs
ShaderBGRA = ShaderCompiler.CompilePS(Device, "bgra", @"color = float4(Texture1.Sample(Sampler, input.Texture).rgba);", null);

// Blend State (currently used -mainly- for RGBA images)
var blendDesc = new BlendDescription();
blendDesc.RenderTarget[0].BlendEnable = true;
blendDesc.RenderTarget[0].SourceBlend = Blend.SourceAlpha;
blendDesc.RenderTarget[0].DestinationBlend = Blend.Zero;
blendDesc.RenderTarget[0].BlendOperation = BlendOperation.Add;
blendDesc.RenderTarget[0].SourceBlendAlpha = Blend.One;
blendDesc.RenderTarget[0].DestinationBlendAlpha = Blend.Zero;
blendDesc.RenderTarget[0].BlendOperationAlpha = BlendOperation.Add;
blendDesc.RenderTarget[0].RenderTargetWriteMask = ColorWriteEnable.All;
// Blend State (currently used -mainly- for RGBA images and OverlayTexture)
blendStateAlpha = Device.CreateBlendState(blendDesc);

// Rasterizer (Will change CullMode to None for H-V Flip)
Expand Down Expand Up @@ -327,6 +338,10 @@ public void Dispose()
blendStateAlpha?.Dispose();
DisposeSwapChain();

overlayTexture?.Dispose();
overlayTextureSrv?.Dispose();
ShaderBGRA?.Dispose();

singleGpu?.Dispose();
singleStage?.Dispose();
singleGpuRtv?.Dispose();
Expand Down
Loading

0 comments on commit 9a47e74

Please sign in to comment.