# Related Blueprint Classes
BP_Live lets you assign a MediaPlayer and a MediaTexture; it then replaces the EmissiveMap in the material of the Blueprint's child StaticMesh with the MediaTexture.
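A minimal C++ sketch of the material swap BP_Live performs; the parameter name `EmissiveMap`, the helper function, and the mesh lookup are assumptions for illustration, since the real logic lives in the Blueprint graph.

```c++
// Sketch of the BP_Live material swap, assuming the screen material exposes a
// texture parameter named "EmissiveMap" (hypothetical helper, not the Blueprint itself).
#include "Components/StaticMeshComponent.h"
#include "Materials/MaterialInstanceDynamic.h"
#include "MediaTexture.h"

void SetupLiveScreen(UStaticMeshComponent* ScreenMesh, UMediaTexture* MediaTexture)
{
	if (!ScreenMesh || !MediaTexture)
	{
		return;
	}

	// Instantiate a dynamic material so parameters can be overridden at runtime
	if (UMaterialInstanceDynamic* Mid = ScreenMesh->CreateAndSetMaterialInstanceDynamic(0))
	{
		// Route the MediaTexture into the slot the material samples for emissive color
		Mid->SetTextureParameterValue(TEXT("EmissiveMap"), MediaTexture);
	}
}
```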
# Director Console
After that, drop video files into the designated Saved folder and they can be played from the director console.
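A hedged sketch of how playback from the Saved folder could be wired up; the `Movies` subfolder, the helper name, and reliance on the player's PlayOnOpen behavior are assumptions, not confirmed by this note.

```c++
// Sketch: open a file that was dropped under <Project>/Saved/ (subfolder name assumed).
#include "MediaPlayer.h"
#include "Misc/Paths.h"

bool PlaySavedVideo(UMediaPlayer* MediaPlayer, const FString& FileName)
{
	if (!MediaPlayer)
	{
		return false;
	}

	// e.g. <Project>/Saved/Movies/MyClip.mp4 -- "Movies" is an assumed subfolder
	const FString FullPath = FPaths::Combine(FPaths::ProjectSavedDir(), TEXT("Movies"), FileName);

	// File sources open via a file:// URL; playback starts if PlayOnOpen is enabled
	return MediaPlayer->OpenUrl(FString::Printf(TEXT("file://%s"), *FullPath));
}
```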
# NDI Playback Logic
NDI settings are added through props (a reception sketch follows the list below).
## Props
- BP_ProjectorD0
- BP_Screen011
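A hedged sketch of how a screen prop might kick off NDI reception with the plugin's receiver class (see Initialize below); the include path, the SourceName field, and the helper are assumptions based on the NDI IO plugin, not code from this project.

```c++
// Sketch: a prop starting NDI reception (hypothetical helper; field names assumed).
#include "Objects/Media/NDIMediaReceiver.h"

void StartNdiOnScreenProp(UNDIMediaReceiver* Receiver, const FString& SourceName)
{
	if (!Receiver)
	{
		return;
	}

	FNDIConnectionInformation Connection;
	Connection.SourceName = SourceName; // e.g. "MACHINE (OBS)" -- assumed field name

	// Standalone usage installs the capture hooks shown in Initialize() below
	Receiver->Initialize(Connection, UNDIMediaReceiver::EUsage::Standalone);
}
```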
## Related Commented-Out Code
- TsMapEnvironmentAssets.ts
- TsMapEnvironmentSingleSelectItemView.ts
  - SetMediaData()
- TsScreenPlayerItemView.ts
  - SetData()
- TsScreenPlayerSelectItemPopupView.ts
  - ChangeMediaType()
# Fixing Blurry NDI Playback
- Relevant function: bool UNDIMediaReceiver::CaptureConnectedVideo()

The receiver is configured in UNDIMediaReceiver::Initialize:
```c++
bool UNDIMediaReceiver::Initialize(const FNDIConnectionInformation& InConnectionInformation, UNDIMediaReceiver::EUsage InUsage)
{
	if (this->p_receive_instance == nullptr)
	{
		if (IsValid(this->InternalVideoTexture))
			this->InternalVideoTexture->UpdateResource();

		// create a non-connected receiver instance
		NDIlib_recv_create_v3_t settings;
		settings.allow_video_fields = false;
		settings.bandwidth = NDIlib_recv_bandwidth_highest;
		settings.color_format = NDIlib_recv_color_format_fastest;
		p_receive_instance = NDIlib_recv_create_v3(&settings);

		// check if it was successful
		if (p_receive_instance != nullptr)
		{
			// If the incoming connection information is valid
			if (InConnectionInformation.IsValid())
			{
				// Alright, we created a non-connected receiver. Let's actually connect
				ChangeConnection(InConnectionInformation);
			}

			if (InUsage == UNDIMediaReceiver::EUsage::Standalone)
			{
				this->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
				VideoCaptureEventHandle = this->OnNDIReceiverVideoCaptureEvent.AddLambda(
					[this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame)
					{
						FTextureRHIRef ConversionTexture = this->DisplayFrame(video_frame);
						if (ConversionTexture != nullptr)
						{
							if ((GetVideoTextureResource() != nullptr) && (GetVideoTextureResource()->TextureRHI != ConversionTexture))
							{
								GetVideoTextureResource()->TextureRHI = ConversionTexture;
								RHIUpdateTextureReference(this->VideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
							}
							if ((GetInternalVideoTextureResource() != nullptr) && (GetInternalVideoTextureResource()->TextureRHI != ConversionTexture))
							{
								GetInternalVideoTextureResource()->TextureRHI = ConversionTexture;
								RHIUpdateTextureReference(this->InternalVideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture);
							}
						}
					});

				// We don't want to limit the engine rendering speed to the sync rate of the connection;
				// hook into the core delegates render thread 'EndFrame'
				FCoreDelegates::OnEndFrameRT.Remove(FrameEndRTHandle);
				FrameEndRTHandle.Reset();
				FrameEndRTHandle = FCoreDelegates::OnEndFrameRT.AddLambda([this]()
				{
					while (this->CaptureConnectedMetadata())
						; // Potential improvement: limit how much metadata is processed, to avoid appearing to lock up due to a metadata flood
					this->CaptureConnectedVideo();
				});

#if UE_EDITOR
				// We don't want to provide perceived issues with the plugin not working so
				// when we get a Pre-exit message, forcefully shutdown the receiver
				FCoreDelegates::OnPreExit.AddWeakLambda(this, [&]() {
					this->Shutdown();
					FCoreDelegates::OnPreExit.RemoveAll(this);
				});

				// We handle this in the 'Play In Editor' versions as well.
				FEditorDelegates::PrePIEEnded.AddWeakLambda(this, [&](const bool) {
					this->Shutdown();
					FEditorDelegates::PrePIEEnded.RemoveAll(this);
				});
#endif
			}
			return true;
		}
	}
	return false;
}
```
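The setting in Initialize that matters most for sharpness is settings.bandwidth: when a receiver asks for NDIlib_recv_bandwidth_lowest, NDI delivers its reduced-resolution preview stream, which shows up as a blurry picture. The fields below mirror the plugin code above; the comments are a reading of the NDI SDK's behavior, not something stated in this note.

```c++
// Receiver settings relevant to perceived sharpness (mirrors the Initialize() code above).
NDIlib_recv_create_v3_t settings;

// highest requests the full-resolution program stream; lowest would deliver
// NDI's reduced-resolution preview stream, which looks blurry on a large screen.
settings.bandwidth = NDIlib_recv_bandwidth_highest;

// fastest favors UYVY/UYVA (4:2:2 chroma subsampling) for cheap GPU conversion,
// which DisplayFrame() below handles; chroma detail is halved horizontally.
settings.color_format = NDIlib_recv_color_format_fastest;
```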
The draw function:
```c++
/**
	Attempts to immediately update the 'VideoTexture' object with the last captured video frame
	from the connected source
*/
FTextureRHIRef UNDIMediaReceiver::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame)
{
	// we need a command list to work with
	FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

	// Actually draw the video frame from cpu to gpu
	switch (video_frame.frame_format_type)
	{
		case NDIlib_frame_format_type_progressive:
			if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
				return DrawProgressiveVideoFrame(RHICmdList, video_frame);
			else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
				return DrawProgressiveVideoFrameAlpha(RHICmdList, video_frame);
			break;

		case NDIlib_frame_format_type_field_0:
		case NDIlib_frame_format_type_field_1:
			if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVY)
				return DrawInterlacedVideoFrame(RHICmdList, video_frame);
			else if (video_frame.FourCC == NDIlib_FourCC_video_type_UYVA)
				return DrawInterlacedVideoFrameAlpha(RHICmdList, video_frame);
			break;
	}

	return nullptr;
}
```
DrawProgressiveVideoFrame