2024-07-11 16:10:54 +08:00
# Related Blueprint Classes
BP_Live: lets you specify the MediaPlayer and MediaTexture, and replaces the EmissiveMap in the material of the Blueprint's child StaticMesh with the MediaTexture.
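A minimal C++ sketch of the equivalent operation (my illustration, not the project's code; the parameter name "EmissiveMap" is assumed to be exposed by the material):
```c++
#include "Components/StaticMeshComponent.h"
#include "Materials/MaterialInstanceDynamic.h"
#include "MediaTexture.h"

// Swap the emissive texture of a child StaticMesh's material for the MediaTexture,
// as BP_Live does in Blueprint. "EmissiveMap" is an assumed material parameter name.
void SetEmissiveToMediaTexture(UStaticMeshComponent* Mesh, UMediaTexture* MediaTexture)
{
    // Create (or fetch) a dynamic material instance so the parameter can be edited at runtime
    if (UMaterialInstanceDynamic* MID = Mesh->CreateAndGetMaterialInstanceDynamic(0))
    {
        MID->SetTextureParameterValue(TEXT("EmissiveMap"), MediaTexture);
    }
}
```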
# Broadcast Console
After that, put the video into the designated Saved folder and it can be played from the broadcast console.
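A hedged sketch of the playback call (assuming the MediaPlayer referenced by BP_Live has PlayOnOpen enabled; the file name is illustrative):
```c++
#include "MediaPlayer.h"
#include "Misc/Paths.h"

// Open a video placed in the project's Saved folder with the given MediaPlayer.
bool PlaySavedVideo(UMediaPlayer* Player, const FString& FileName /* e.g. "demo.mp4" */)
{
    const FString VideoPath = FPaths::Combine(FPaths::ProjectSavedDir(), FileName);
    return Player->OpenFile(VideoPath); // playback starts if PlayOnOpen is set
}
```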
2024-10-11 17:09:25 +08:00
# NDI Playback Logic
2024-10-11 18:06:20 +08:00
NDI settings are added through props; see the sketch after the list below.
## Props
- BP_ProjectorD0
- BP_Screen011
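A hypothetical sketch of what such a prop could do to start receiving NDI, based on the UNDIMediaReceiver API shown further below (the SourceName field and its value are assumptions):
```c++
// Somewhere in the prop's setup code (illustrative only):
FNDIConnectionInformation Connection;
Connection.SourceName = TEXT("MY-PC (OBS)"); // assumed NDI source name on the network

UNDIMediaReceiver* Receiver = NewObject<UNDIMediaReceiver>();
// Standalone usage hooks video capture into the render thread's EndFrame (see below)
Receiver->Initialize(Connection, UNDIMediaReceiver::EUsage::Standalone);
```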
2024-10-11 17:09:25 +08:00
## Related Commented-Out Code
- TsMapEnvironmentAssets.ts
- TsMapEnvironmentSingleSelectItemView.ts
  - SetMediaData()
- TsScreenPlayerItemView.ts
  - SetData()
- TsScreenPlayerSelectItemPopupView.ts
  - ChangeMediaType()

2024-10-15 19:22:55 +08:00
# Fixing NDI Playback Blurriness
- bool UNDIMediaReceiver::CaptureConnectedVideo()
```c++
bool UNDIMediaReceiver::Initialize(const FNDIConnectionInformation& InConnectionInformation, UNDIMediaReceiver::EUsage InUsage)
{
    if (this->p_receive_instance == nullptr)
    {
        if (IsValid(this->InternalVideoTexture))
            this->InternalVideoTexture->UpdateResource();

        // create a non-connected receiver instance
        NDIlib_recv_create_v3_t settings;
        settings.allow_video_fields = false;
        settings.bandwidth = NDIlib_recv_bandwidth_highest;
        settings.color_format = NDIlib_recv_color_format_fastest;

        p_receive_instance = NDIlib_recv_create_v3(&settings);

        // check if it was successful
        if (p_receive_instance != nullptr)
        {
            // If the incoming connection information is valid
            if (InConnectionInformation.IsValid())
            {
                //// Alright we created a non-connected receiver. Lets actually connect
                ChangeConnection(InConnectionInformation);
            }

            if (InUsage == UNDIMediaReceiver::EUsage::Standalone)
            {
                this->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
                VideoCaptureEventHandle = this->OnNDIReceiverVideoCaptureEvent.AddLambda(
                    [this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame)
                    {
                        FTextureRHIRef ConversionTexture = this->DisplayFrame(video_frame);
                        if (ConversionTexture != nullptr)
                        {
                            if ((GetVideoTextureResource() != nullptr) && (GetVideoTextureResource()->TextureRHI != ConversionTexture))
                            {
                                GetVideoTextureResource()->TextureRHI = ConversionTexture;
                                RHIUpdateTextureReference(this->VideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
                            }
                            if ((GetInternalVideoTextureResource() != nullptr) && (GetInternalVideoTextureResource()->TextureRHI != ConversionTexture))
                            {
                                GetInternalVideoTextureResource()->TextureRHI = ConversionTexture;
                                RHIUpdateTextureReference(this->InternalVideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
                            }
                        }
                    });

                // We don't want to limit the engine rendering speed to the sync rate of the connection;
                // hook into the core delegates render thread 'EndFrame'
                FCoreDelegates::OnEndFrameRT.Remove(FrameEndRTHandle);
                FrameEndRTHandle.Reset();
                FrameEndRTHandle = FCoreDelegates::OnEndFrameRT.AddLambda([this]()
                {
                    while (this->CaptureConnectedMetadata())
                        ; // Potential improvement: limit how much metadata is processed, to avoid appearing to lock up due to a metadata flood
                    this->CaptureConnectedVideo();
                });

#if UE_EDITOR
                // We don't want to provide perceived issues with the plugin not working so
                // when we get a Pre-exit message, forcefully shutdown the receiver
                FCoreDelegates::OnPreExit.AddWeakLambda(this, [&]() {
                    this->Shutdown();
                    FCoreDelegates::OnPreExit.RemoveAll(this);
                });

                // We handle this in the 'Play In Editor' versions as well.
                FEditorDelegates::PrePIEEnded.AddWeakLambda(this, [&](const bool) {
                    this->Shutdown();
                    FEditorDelegates::PrePIEEnded.RemoveAll(this);
                });
#endif
            }
            return true;
        }
    }
    return false;
}
```
The draw function:
```c++
/**
    Attempts to immediately update the 'VideoTexture' object with the last captured video frame
    from the connected source
*/
FTextureRHIRef UNDIMediaReceiver::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame)
{
    // we need a command list to work with
    FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

    // Actually draw the video frame from cpu to gpu
    switch (video_frame.frame_format_type)
    {
        case NDIlib_frame_format_type_progressive:
            if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
                return DrawProgressiveVideoFrame(RHICmdList, video_frame);
            else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
                return DrawProgressiveVideoFrameAlpha(RHICmdList, video_frame);
            break;

        case NDIlib_frame_format_type_field_0:
        case NDIlib_frame_format_type_field_1:
            if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
                return DrawInterlacedVideoFrame(RHICmdList, video_frame);
            else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
                return DrawInterlacedVideoFrameAlpha(RHICmdList, video_frame);
            break;
    }

    return nullptr;
}
```
2024-10-16 11:48:53 +08:00
Call chain for the progressive UYVY path:
UNDIMediaReceiver::CaptureConnectedVideo
=> DisplayFrame (NDIlib_frame_format_type_progressive, NDIlib_FourCC_video_type_UYVY)
=> DrawProgressiveVideoFrame
2024-10-17 13:45:48 +08:00
## Shader Binding RT
Setting up the RT:
```c++
FTextureRHIRef TargetableTexture;

// check for our frame sync object and that we are actually connected to the end point
if (p_framesync_instance != nullptr)
{
    // Initialize the frame size parameter
    FIntPoint FrameSize = FIntPoint(Result.xres, Result.yres);

    if (!RenderTarget.IsValid() || !RenderTargetDescriptor.IsValid() ||
        RenderTargetDescriptor.GetSize() != FIntVector(FrameSize.X, FrameSize.Y, 0) ||
        DrawMode != EDrawMode::Progressive)
    {
        // Create the RenderTarget descriptor
        RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc(
            FrameSize, PF_B8G8R8A8, FClearValueBinding::None, TexCreate_None,
            TexCreate_RenderTargetable | TexCreate_SRGB, false);

        // Update the shader resource for the 'SourceTexture'
        // The source texture will be given UYVY data, so make it half-width
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 1))
        const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverProgressiveSourceTexture"))
            .SetExtent(FrameSize.X / 2, FrameSize.Y)
            .SetFormat(PF_B8G8R8A8)
            .SetNumMips(1)
            .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic);

        SourceTexture = RHICreateTexture(CreateDesc);
#elif (ENGINE_MAJOR_VERSION == 4) || (ENGINE_MAJOR_VERSION == 5)
        FRHIResourceCreateInfo CreateInfo(TEXT("NDIMediaReceiverProgressiveSourceTexture"));
        TRefCountPtr<FRHITexture2D> DummyTexture2DRHI;
        RHICreateTargetableShaderResource2D(FrameSize.X / 2, FrameSize.Y, PF_B8G8R8A8, 1, TexCreate_Dynamic,
                                            TexCreate_RenderTargetable, false, CreateInfo, SourceTexture,
                                            DummyTexture2DRHI);
#else
#error "Unsupported engine major version"
#endif

        // Find a free target-able texture from the render pool
        GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTarget, TEXT("NDIIO"));

        DrawMode = EDrawMode::Progressive;
    }

#if ENGINE_MAJOR_VERSION >= 5
    TargetableTexture = RenderTarget->GetRHI();
#elif ENGINE_MAJOR_VERSION == 4
    TargetableTexture = RenderTarget->GetRenderTargetItem().TargetableTexture;
#endif

    ...

    // Initialize the Render pass with the conversion texture
    FRHITexture* ConversionTexture = TargetableTexture.GetReference();
    FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store);

    // Needs to be called *before* ApplyCachedRenderTargets, since BeginRenderPass is caching the render targets.
    RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Recv Color Conversion"));
```
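Why the SourceTexture is half-width: each B8G8R8A8 texel packs one UYVY macropixel, i.e. two horizontal output pixels sharing a single U/V pair. A CPU-side sketch of the layout (my illustration; BT.601 full-range constants chosen for the example):
```c++
#include <algorithm>
#include <cstdint>

struct FUYVYMacropixel { uint8_t U, Y0, V, Y1; }; // 4 bytes == 2 output pixels

static uint8_t Clamp8(float V) { return (uint8_t)std::min(std::max(V, 0.0f), 255.0f); }

// Decode one macropixel into two RGB pixels; both pixels reuse the same chroma sample.
void DecodeMacropixel(const FUYVYMacropixel& M, uint8_t OutRGB[2][3])
{
    const float Y[2] = { (float)M.Y0, (float)M.Y1 };
    const float Cb = M.U - 128.0f, Cr = M.V - 128.0f;
    for (int i = 0; i < 2; ++i)
    {
        OutRGB[i][0] = Clamp8(Y[i] + 1.402f * Cr);               // R
        OutRGB[i][1] = Clamp8(Y[i] - 0.344f * Cb - 0.714f * Cr); // G
        OutRGB[i][2] = Clamp8(Y[i] + 1.772f * Cb);               // B
    }
}
```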
Setting the UYVY data passed in from NDI:
```c++
// set the texture parameter of the conversion shader
FNDIIOShaderUYVYtoBGRAPS::Params Params(SourceTexture, SourceTexture, FrameSize,
                                        FVector2D(0, 0), FVector2D(1, 1),
                                        bPerformsRGBtoLinear ? FNDIIOShaderPS::EColorCorrection::sRGBToLinear : FNDIIOShaderPS::EColorCorrection::None,
                                        FVector2D(0.f, 1.f));
ConvertShader->SetParameters(RHICmdList, Params);

// Create the update region structure
FUpdateTextureRegion2D Region(0, 0, 0, 0, FrameSize.X / 2, FrameSize.Y);

// Set the Pixel data of the NDI Frame to the SourceTexture
RHIUpdateTexture2D(SourceTexture, 0, Region, Result.line_stride_in_bytes, (uint8*&)Result.p_data);
```
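A stride sanity check (my own arithmetic): UYVY is 2 bytes per pixel, and the half-width B8G8R8A8 texture is 4 bytes per texel, so the row sizes match and RHIUpdateTexture2D can consume Result.line_stride_in_bytes unchanged:
```c++
// e.g. for a 1920-wide frame:
//   UYVY row:            1920 px * 2 B = 3840 B
//   half-width BGRA row:  960 tx * 4 B = 3840 B
static_assert(1920 * 2 == (1920 / 2) * 4, "UYVY row size == half-width BGRA row size");
```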
2024-10-16 12:45:55 +08:00
## Solution
From [NDI plugin quality trouble](https://forums.unrealengine.com/t/ndi-plugin-quality-trouble/1970097):
I changed only the shader “NDIIO/Shaders/Private/NDIIOShaders.usf”.
For example, the function **void NDIIOUYVYtoBGRAPS** (shader from 8-bit UYVY to 8-bit RGBA, alpha set to 1):
_WAS:_
```hlsl
float4 UYVYB = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerB, InUV);
float4 UYVYT = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV);
float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
float4 YUVA;
float FracX = PosX % 2.0f;
YUVA.x = (1 - FracX) * UYVYT.y + FracX * UYVYT.w;
YUVA.yz = UYVYB.zx;
YUVA.w = 1;
```
_I DID:_
```hlsl
float4 UYVYB = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerB, InUV);
float4 UYVYT0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV + float2(-0.25f / NDIIOShaderUB.InputWidth, 0));
float4 UYVYT1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV + float2(0.25f / NDIIOShaderUB.InputWidth, 0));
float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
float4 YUVA;
float FracX = (PosX % 2.0f) * 0.5f;
YUVA.x = (1 - FracX) * UYVYT1.y + FracX * UYVYT0.w;
YUVA.yz = UYVYB.zx;
YUVA.w = 1;
```
Small changes, but the result seems much better.
Of course, I added a bit of sharpness to the material after I changed the shader, but even without that, the result looks better than in the original version.
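My reading of why this helps (my own analysis, not from the forum post): assuming the output is twice the width of the half-width input texture, PosX at an output-pixel center is i + 0.5, so the original FracX alternates between 0.5 and 1.5; odd pixels therefore extrapolate luma past the macropixel instead of interpolating. The quarter-texel offsets plus the halved FracX keep the weights at 0.25/0.75, always blending the two nearest luma samples. A standalone check of the weights:
```c++
#include <cmath>
#include <cstdio>

int main()
{
    const float InputWidth = 960.0f; // assumed half-width source texture size
    for (int i = 0; i < 4; ++i)
    {
        const float InUVx = (i + 0.5f) / (2.0f * InputWidth); // output pixel center
        const float PosX = 2.0f * InUVx * InputWidth;         // == i + 0.5
        const float FracOld = fmodf(PosX, 2.0f);              // 0.5, 1.5, 0.5, 1.5
        const float FracNew = FracOld * 0.5f;                 // 0.25, 0.75, ...
        // Old: a weight of 1.5 extrapolates beyond the macropixel (ringing);
        // new: weights stay in [0,1], a plain linear interpolation.
        printf("pixel %d: old weight = %.2f, new weight = %.2f\n", i, FracOld, FracNew);
    }
    return 0;
}
```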
2024-10-16 15:26:06 +08:00
Filtering reference: https://zhuanlan.zhihu.com/p/633122224
2024-10-16 12:45:55 +08:00
## UYVY (YUV 4:2:2)
- https://zhuanlan.zhihu.com/p/695302926
- https://blog.csdn.net/gsp1004/article/details/103037312
