Related Blueprint Classes
BP_Live: lets you assign a MediaPlayer and a MediaTexture, and swaps the EmissiveMap in the Blueprint's child StaticMesh material for the MediaTexture.
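A minimal C++ sketch of the material swap BP_Live performs in Blueprint; ScreenMesh, MediaTexture, and the "EmissiveMap" parameter name are placeholders that must match the actual assets:

#include "Components/StaticMeshComponent.h"
#include "Materials/MaterialInstanceDynamic.h"
#include "MediaTexture.h"

// Replace the screen material's EmissiveMap with the MediaTexture at runtime.
static void BindMediaTextureToScreen(UStaticMeshComponent* ScreenMesh, UMediaTexture* MediaTexture)
{
	if (UMaterialInstanceDynamic* MID = ScreenMesh ? ScreenMesh->CreateAndSetMaterialInstanceDynamic(0) : nullptr)
	{
		// The material must expose a texture parameter named "EmissiveMap".
		MID->SetTextureParameterValue(TEXT("EmissiveMap"), MediaTexture);
	}
}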
Switcher Console
Afterwards, drop the video into the designated Saved folder and it can be played from the switcher console.
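A hedged sketch of how playback could be kicked off in C++; the "Movies/Demo.mp4" sub-path is a placeholder for whatever folder the switcher console actually scans:

#include "MediaPlayer.h"
#include "Misc/Paths.h"

// Open a clip from the project's Saved folder with the player bound to the screen.
static void PlayFromSavedDir(UMediaPlayer* MediaPlayer)
{
	const FString Path = FPaths::Combine(FPaths::ProjectSavedDir(), TEXT("Movies/Demo.mp4"));
	if (MediaPlayer && FPaths::FileExists(Path))
	{
		MediaPlayer->OpenFile(Path); // decoded frames land in the bound MediaTexture
	}
}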
NDI Playback Logic
NDI settings are added through props; see the sketch after the prop list below.
Props
- BP_ProjectorD0
- BP_Screen011
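A hedged sketch of how a prop could start NDI reception through the receiver API documented below; the FNDIConnectionInformation field name is an assumption based on the NDI IO plugin and should be verified against the plugin version in use:

// NDIReceiver is a UNDIMediaReceiver* held by the prop (hypothetical member).
FNDIConnectionInformation Connection;
Connection.SourceName = TEXT("MACHINE (Channel 1)"); // hypothetical NDI source name
if (IsValid(NDIReceiver) && NDIReceiver->Initialize(Connection, UNDIMediaReceiver::EUsage::Standalone))
{
	// Initialize() hooks OnEndFrameRT, so frames now stream into the receiver's VideoTexture.
}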
Related Commented-Out Code
- TsMapEnvironmentAssets.ts
- TsMapEnvironmentSingleSelectItemView.ts
  - SetMediaData()
- TsScreenPlayerItemView.ts
  - SetData()
- TsScreenPlayerSelectItemPopupView.ts
  - ChangeMediaType()
Fixing Blurry NDI Playback
- Entry point: bool UNDIMediaReceiver::CaptureConnectedVideo(), invoked once per rendered frame by the OnEndFrameRT hook that Initialize() installs:
bool UNDIMediaReceiver::Initialize(const FNDIConnectionInformation& InConnectionInformation, UNDIMediaReceiver::EUsage InUsage)
{
	if (this->p_receive_instance == nullptr)
	{
		if (IsValid(this->InternalVideoTexture))
			this->InternalVideoTexture->UpdateResource();

		// create a non-connected receiver instance
		NDIlib_recv_create_v3_t settings;
		settings.allow_video_fields = false;
		settings.bandwidth = NDIlib_recv_bandwidth_highest;
		settings.color_format = NDIlib_recv_color_format_fastest;

		p_receive_instance = NDIlib_recv_create_v3(&settings);

		// check if it was successful
		if (p_receive_instance != nullptr)
		{
			// If the incoming connection information is valid
			if (InConnectionInformation.IsValid())
			{
				// Alright, we created a non-connected receiver. Let's actually connect
				ChangeConnection(InConnectionInformation);
			}

			if (InUsage == UNDIMediaReceiver::EUsage::Standalone)
			{
				this->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
				VideoCaptureEventHandle = this->OnNDIReceiverVideoCaptureEvent.AddLambda(
					[this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame)
					{
						FTextureRHIRef ConversionTexture = this->DisplayFrame(video_frame);
						if (ConversionTexture != nullptr)
						{
							if ((GetVideoTextureResource() != nullptr) && (GetVideoTextureResource()->TextureRHI != ConversionTexture))
							{
								GetVideoTextureResource()->TextureRHI = ConversionTexture;
								RHIUpdateTextureReference(this->VideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
							}
							if ((GetInternalVideoTextureResource() != nullptr) && (GetInternalVideoTextureResource()->TextureRHI != ConversionTexture))
							{
								GetInternalVideoTextureResource()->TextureRHI = ConversionTexture;
								RHIUpdateTextureReference(this->InternalVideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
							}
						}
					});

				// We don't want to limit the engine rendering speed to the sync rate of the connection,
				// so hook into the core delegates render thread 'EndFrame'
				FCoreDelegates::OnEndFrameRT.Remove(FrameEndRTHandle);
				FrameEndRTHandle.Reset();
				FrameEndRTHandle = FCoreDelegates::OnEndFrameRT.AddLambda([this]()
				{
					while (this->CaptureConnectedMetadata())
						; // Potential improvement: limit how much metadata is processed, to avoid appearing to lock up due to a metadata flood
					this->CaptureConnectedVideo();
				});

#if UE_EDITOR
				// We don't want to provide perceived issues with the plugin not working, so
				// when we get a Pre-exit message, forcefully shut down the receiver
				FCoreDelegates::OnPreExit.AddWeakLambda(this, [&]() {
					this->Shutdown();
					FCoreDelegates::OnPreExit.RemoveAll(this);
				});

				// We handle this in the 'Play In Editor' versions as well.
				FEditorDelegates::PrePIEEnded.AddWeakLambda(this, [&](const bool) {
					this->Shutdown();
					FEditorDelegates::PrePIEEnded.RemoveAll(this);
				});
#endif
			}

			return true;
		}
	}

	return false;
}
Draw Function
/**
	Attempts to immediately update the 'VideoTexture' object with the last captured video frame
	from the connected source
*/
FTextureRHIRef UNDIMediaReceiver::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame)
{
	// we need a command list to work with
	FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

	// Actually draw the video frame from cpu to gpu
	switch (video_frame.frame_format_type)
	{
		case NDIlib_frame_format_type_progressive:
			if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
				return DrawProgressiveVideoFrame(RHICmdList, video_frame);
			else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
				return DrawProgressiveVideoFrameAlpha(RHICmdList, video_frame);
			break;

		case NDIlib_frame_format_type_field_0:
		case NDIlib_frame_format_type_field_1:
			if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
				return DrawInterlacedVideoFrame(RHICmdList, video_frame);
			else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
				return DrawInterlacedVideoFrameAlpha(RHICmdList, video_frame);
			break;
	}

	return nullptr;
}
DrawProgressiveVideoFrame
Call chain: UNDIMediaReceiver::CaptureConnectedVideo => DisplayFrame (NDIlib_frame_format_type_progressive, NDIlib_FourCC_video_type_UYVY) => DrawProgressiveVideoFrame
Solution
I changed only the shader "NDIIO/Shaders/Private/NDIIOShaders.usf". For example, the function void NDIIOUYVYtoBGRAPS (// Shader from 8 bits UYVY to 8 bits RGBA (alpha set to 1)):
WAS:
float4 UYVYB = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerB, InUV);
float4 UYVYT = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV);
float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
float4 YUVA;
float FracX = PosX % 2.0f;
YUVA.x = (1 - FracX) * UYVYT.y + FracX * UYVYT.w;
YUVA.yz = UYVYB.zx;
YUVA.w = 1;
I DID:
float4 UYVYB = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerB, InUV);
float4 UYVYT0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV + float2(-0.25f / NDIIOShaderUB.InputWidth, 0));
float4 UYVYT1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, InUV + float2(0.25f / NDIIOShaderUB.InputWidth, 0));
float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
float4 YUVA;
float FracX = (PosX % 2.0f) * 0.5f;
YUVA.x = (1 - FracX) * UYVYT1.y + FracX * UYVYT0.w;
YUVA.yz = UYVYB.zx;
YUVA.w = 1;
The changes are small, but the result looks much better. Of course, I added a bit of sharpness to the material after changing the shader, but even without that, the result looks better than the original version.
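One reading of why this helps (an interpretation, not from the original post): assuming InputWidth is the output width in pixels, an offset of 0.25f / NDIIOShaderUB.InputWidth in UV space is a quarter of an output pixel, i.e. one eighth of a UYVY texel, since the packed texture is half the output width. Halving FracX then blends the two shifted taps instead of hard-switching between a macropixel's two luma samples:

// Standalone model of the luma blend (not the actual shader; identifiers are illustrative).
// A UYVY macropixel packs {U, Y0, V, Y1}, i.e. two luma samples for two adjacent output pixels.
float LumaOriginal(float Y0, float Y1, float FracX)   // FracX = PosX % 2.0f
{
	return (1.0f - FracX) * Y0 + FracX * Y1;          // hard per-pixel switch
}

float LumaPatched(float YLeftTap, float YRightTap, float FracX)
{
	// YRightTap = .y of the +0.25px tap, YLeftTap = .w of the -0.25px tap
	const float HalfFrac = FracX * 0.5f;              // halved blend weight
	return (1.0f - HalfFrac) * YRightTap + HalfFrac * YLeftTap;
}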
Filtering reference: https://zhuanlan.zhihu.com/p/633122224