vault backup: 2024-11-14 19:07:12

BlueRose 2024-11-14 19:07:13 +08:00
parent d5cebe4217
commit daa374467a
2 changed files with 7 additions and 2 deletions

View File

@@ -415,3 +415,4 @@ public:
## MotionReceiverActor
![[动捕逻辑思维导图.canvas]]

View File

@@ -3,11 +3,15 @@
{"id":"2666bc7c541cb485","type":"text","text":"FChingmuThread::Run()\n\n发送数据\nOnGetHumanData_NotInGameThread() => PutMocapDataIntoQueue => Sender->OnGetRawMocapData_NotInGameThread(jsonStr);\n\n```c++\nwhile (bRun)\n{\n\tif (OwnerActor && OwnerActor->UseThread && OwnerActor->ChingmuComp && OwnerActor->ChingmuComp->IsConnected())\n\t{\n\t\tCurTime = ULiveDirectorStatics::GetUnixTime();\n\t\t// Human\n\t\tfor (auto HumanIndex = 0; HumanIndex < OwnerActor->MaxHumanCount; HumanIndex++)\n\t\t{\n\t\t\tconst auto bRes = OwnerActor->ChingmuComp->FullBodyMotionCapBaseBonesLocalSpaceRotation(\n\t\t\t\tOwnerActor->ChingmuFullAddress, HumanIndex, TmpTimeCode);\n\t\t\tif (bRes)\n\t\t\t{\n\t\t\t\tif (!HumanToLastReceiveTime.Contains(HumanIndex))\n\t\t\t\t{\n\t\t\t\t\tHumanToLastReceiveTime.Add(HumanIndex, 0);\n\t\t\t\t}\n\t\t\t\tif (HumanToLastReceiveTime[HumanIndex] != TmpTimeCode.Frames)\n\t\t\t\t{\n\t\t\t\t\tHumanToLastReceiveTime[HumanIndex] = TmpTimeCode.Frames;\n\t\t\t\t\tOwnerActor->OnGetHumanData_NotInGameThread(HumanIndex, CurTime, TmpTimeCode.Frames);\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\t// get same frame, skip\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\tif (bRun)\n\t{\n\t\tFPlatformProcess::Sleep(OwnerActor ? OwnerActor->ThreadInterval : 0.004);\n\t}\n\telse\n\t{\n\t\tbreak;\n\t}\n}\n\n```","x":-600,"y":-420,"width":980,"height":1180},
{"id":"c5705d4ff792be0b","type":"text","text":"**ChingmuComp.StartConnectServer()** 在UI界面控制链接服务器。\nAChingmuMocapReceiverActor::BeginPlay()创建FChingmuThread。","x":-360,"y":-640,"width":500,"height":140},
{"id":"668c865498842d96","type":"text","text":"AChingmuMocapReceiverActor::Tick()\n\n```c++\nconst auto CurTime = ULiveDirectorStatics::GetUnixTime();\nif(UseThread)\n{\n\t// 线程方式\n\t// 在数据队列中获取青瞳数据\n\twhile (!FrameQueue.IsEmpty())\n\t{\n\t\tST_MocapFrameData* Frame;\n\t\tif (FrameQueue.Dequeue(Frame))\n\t\t{\n\t\t\tPutMocapDataIntoFrameList(Frame);\n\t\t}\n\t}\n}\n\nDoSample(AllHumanFrames);\nDoSample(AllRigidBodyFrames);\n\n// 每隔1s计算一次平均包间隔\nif (CurTime - LastCheckIntervalTime > 1000)\n{\n\tif (AllHumanFrames.Num() > 0)\n\t{\n\t\tAllHumanFrames[0]->CalculatePackageAverageInterval(this->PackageAverageInterval);\n\t\tLastCheckIntervalTime = CurTime;\n\t}\n}\n```","x":-600,"y":820,"width":980,"height":800},
{"id":"04df15f334d740f3","type":"text","text":"IdolAnimInstance & Anim_FullBody\n\nIdolAnimInstance主要是取得场景中的**AMotionReceiverActor**以及设置身份。\nAnim_FullBody\n\n```c++\nvoid FAnimNode_FullBody::Update_AnyThread(const FAnimationUpdateContext& Context)\n{\n\tSourcePose.Update(Context);\n\tEMotionSourceType MotionSourceType = EMotionSourceType::MST_MotionServer;\n\tconst UIdolAnimInstance* IdolAnimInstance = Cast<UIdolAnimInstance>(\n\t\tContext.AnimInstanceProxy->GetAnimInstanceObject());\n\tif (IdolAnimInstance)\n\t{\n\t\tMotionSourceType = IdolAnimInstance->GetMotionSourceType();\n\t}\n\tif (MotionSourceType == EMotionSourceType::MST_MotionServer)\n\t{\n\t\tconst FString ValidIdentity = GetFullBodyIdentity(Context);\n\t\tconst auto Recv = GetMotionReceiver(Context);\n\t\tif (!ValidIdentity.IsEmpty() && Recv.IsValid())\n\t\t{\n\t\t\tbGetMotionData = Recv->SampleFullBodyData_AnimationThread(ValidIdentity,\n\t\t\t ULiveDirectorStatics::GetUnixTime() -\n\t\t\t UMotionUtils::BackSampleTime * 2,\n\t\t\t SampledFullBodyData);\n\t\t}\n\t}\n}\n\nvoid FAnimNode_FullBody::Evaluate_AnyThread(FPoseContext& Output)\n{\n\tSourcePose.Evaluate(Output);\n\tif (!InitializedBoneRefIndex)\n\t{\n\t\tInitBoneRefIndex(Output);\n\t\tInitializedBoneRefIndex = true;\n\t}\n\tEMotionSourceType MotionSourceType = EMotionSourceType::MST_MotionServer;\n\tconst UIdolAnimInstance* IdolAnimInstance = Cast<UIdolAnimInstance>(\n\t\tOutput.AnimInstanceProxy->GetAnimInstanceObject());\n\tif (IdolAnimInstance)\n\t{\n\t\tMotionSourceType = IdolAnimInstance->GetMotionSourceType();\n\t}\n\n\tFMotionFrameFullBodyData& EvaluatedFullBodyData = SampledFullBodyData;\n\n\tswitch (MotionSourceType)\n\t{\n\tcase EMotionSourceType::MST_MotionServer:\n\t\tif (!bGetMotionData)\n\t\t{\n\t\t\treturn;\n\t\t}\n\t\tEvaluatedFullBodyData = SampledFullBodyData;\n\t\tbreak;\n\tcase EMotionSourceType::MST_SequoiaReplay:\n\t\t{\n\t\t\t// Evaluate from sequoia source.\n\t\t\tconst FSequoiaMotionSource& MotionSource = FSequoiaMotionSource::Get();\n\t\t\tconst FString ValidIdentity = GetFullBodyIdentity(Output);\n\t\t\tif (const FMotionFrameFullBodyData* FrameSnapshot = MotionSource.EvaluateFrame_AnyThread(ValidIdentity))\n\t\t\t{\n\t\t\t\tEvaluatedFullBodyData = *FrameSnapshot;\n\t\t\t\tbGetMotionData = true;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tUE_LOG(LogTemp, Warning, TEXT(\"%s No Sequoia Frame Data found.AvatarName=%s\"),\n\t\t\t\t ANSI_TO_TCHAR(__FUNCTION__), *ValidIdentity)\n\t\t\t\tbGetMotionData = false;\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\n\t\tbreak;\n\tdefault:\n\t\tbreak;\n\t}\n\n\tApplyDataToPose(Output, EvaluatedFullBodyData);\n}\n```","x":-960,"y":1720,"width":1700,"height":2080}
{"id":"04df15f334d740f3","type":"text","text":"IdolAnimInstance & Anim_FullBody\n\nIdolAnimInstance主要是取得场景中的**AMotionReceiverActor**以及设置身份。\nAnim_FullBody\n\n```c++\nvoid FAnimNode_FullBody::Update_AnyThread(const FAnimationUpdateContext& Context)\n{\n\tSourcePose.Update(Context);\n\tEMotionSourceType MotionSourceType = EMotionSourceType::MST_MotionServer;\n\tconst UIdolAnimInstance* IdolAnimInstance = Cast<UIdolAnimInstance>(\n\t\tContext.AnimInstanceProxy->GetAnimInstanceObject());\n\tif (IdolAnimInstance)\n\t{\n\t\tMotionSourceType = IdolAnimInstance->GetMotionSourceType();\n\t}\n\tif (MotionSourceType == EMotionSourceType::MST_MotionServer)\n\t{\n\t\tconst FString ValidIdentity = GetFullBodyIdentity(Context);\n\t\tconst auto Recv = GetMotionReceiver(Context);\n\t\tif (!ValidIdentity.IsEmpty() && Recv.IsValid())\n\t\t{\n\t\t\tbGetMotionData = Recv->SampleFullBodyData_AnimationThread(ValidIdentity,\n\t\t\t ULiveDirectorStatics::GetUnixTime() -\n\t\t\t UMotionUtils::BackSampleTime * 2,\n\t\t\t SampledFullBodyData);\n\t\t}\n\t}\n}\n\nvoid FAnimNode_FullBody::Evaluate_AnyThread(FPoseContext& Output)\n{\n\tSourcePose.Evaluate(Output);\n\tif (!InitializedBoneRefIndex)\n\t{\n\t\tInitBoneRefIndex(Output);\n\t\tInitializedBoneRefIndex = true;\n\t}\n\tEMotionSourceType MotionSourceType = EMotionSourceType::MST_MotionServer;\n\tconst UIdolAnimInstance* IdolAnimInstance = Cast<UIdolAnimInstance>(\n\t\tOutput.AnimInstanceProxy->GetAnimInstanceObject());\n\tif (IdolAnimInstance)\n\t{\n\t\tMotionSourceType = IdolAnimInstance->GetMotionSourceType();\n\t}\n\n\tFMotionFrameFullBodyData& EvaluatedFullBodyData = SampledFullBodyData;\n\n\tswitch (MotionSourceType)\n\t{\n\tcase EMotionSourceType::MST_MotionServer:\n\t\tif (!bGetMotionData)\n\t\t{\n\t\t\treturn;\n\t\t}\n\t\tEvaluatedFullBodyData = SampledFullBodyData;\n\t\tbreak;\n\tcase EMotionSourceType::MST_SequoiaReplay:\n\t\t{\n\t\t\t// Evaluate from sequoia source.\n\t\t\tconst FSequoiaMotionSource& MotionSource = FSequoiaMotionSource::Get();\n\t\t\tconst FString ValidIdentity = GetFullBodyIdentity(Output);\n\t\t\tif (const FMotionFrameFullBodyData* FrameSnapshot = MotionSource.EvaluateFrame_AnyThread(ValidIdentity))\n\t\t\t{\n\t\t\t\tEvaluatedFullBodyData = *FrameSnapshot;\n\t\t\t\tbGetMotionData = true;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tUE_LOG(LogTemp, Warning, TEXT(\"%s No Sequoia Frame Data found.AvatarName=%s\"),\n\t\t\t\t ANSI_TO_TCHAR(__FUNCTION__), *ValidIdentity)\n\t\t\t\tbGetMotionData = false;\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\n\t\tbreak;\n\tdefault:\n\t\tbreak;\n\t}\n\n\tApplyDataToPose(Output, EvaluatedFullBodyData);\n}\n```","x":-960,"y":1720,"width":1700,"height":2080},
{"id":"778e83e66edd5118","x":-903,"y":3980,"width":1586,"height":197,"type":"text","text":"bool AMotionReceiverActor::SampleFullBodyData_AnimationThread()\n1. 对CharacterToFrameList里的角色数据进行采样并将采样数据存储到SampledFullBodyData中。\n2. CharacterToFrameList的数据会在接收到网络传递的逻辑后填充ASimpleUDPReceiverActor::OnReceiveData_NetworkThread() => ProcessReceivedData_NetworkThread => PutFrameIntoQueue_NetworkThread() "},
{"id":"521dba38cdd6c593","x":-460,"y":4300,"width":700,"height":120,"type":"text","text":"FMotionFrameFullBodyData& EvaluatedFullBodyData = SampledFullBodyData;\nApplyDataToPose(Output, EvaluatedFullBodyData);"}
],
"edges":[
{"id":"b6e4d43c4c38cf16","fromNode":"2666bc7c541cb485","fromSide":"bottom","toNode":"668c865498842d96","toSide":"top"},
{"id":"34998812ac1bd8a8","fromNode":"c5705d4ff792be0b","fromSide":"bottom","toNode":"2666bc7c541cb485","toSide":"top"},
{"id":"2e063b7710fd9a81","fromNode":"668c865498842d96","fromSide":"bottom","toNode":"04df15f334d740f3","toSide":"top"}
{"id":"2e063b7710fd9a81","fromNode":"668c865498842d96","fromSide":"bottom","toNode":"04df15f334d740f3","toSide":"top"},
{"id":"ddef3dd868ca08bf","fromNode":"04df15f334d740f3","fromSide":"bottom","toNode":"778e83e66edd5118","toSide":"top","label":"Update_AnyThread"},
{"id":"037baa41a3eb9866","fromNode":"778e83e66edd5118","fromSide":"bottom","toNode":"521dba38cdd6c593","toSide":"top","label":"Evaluate_AnyThread"}
]
}
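
The canvas notes that AChingmuMocapReceiverActor::BeginPlay() creates the FChingmuThread that feeds the receiver. A minimal sketch of that wiring follows; the class name FChingmuThreadSketch, the constructor argument, and the shutdown handling are assumptions for illustration, not the project's actual code.

```c++
#include "CoreMinimal.h"
#include "HAL/PlatformProcess.h"
#include "HAL/Runnable.h"
#include "HAL/RunnableThread.h"
#include "HAL/ThreadSafeBool.h"

class AChingmuMocapReceiverActor; // owner actor from the canvas; its members are assumed

// Hypothetical stand-in for FChingmuThread: an FRunnable whose Run() polls the
// Chingmu component and sleeps between polls, as in the Run() excerpt in the canvas.
class FChingmuThreadSketch : public FRunnable
{
public:
	explicit FChingmuThreadSketch(AChingmuMocapReceiverActor* InOwner)
		: OwnerActor(InOwner)
	{
	}

	virtual uint32 Run() override
	{
		while (bRun)
		{
			// Poll ChingmuComp here and push new frames into the queue.
			FPlatformProcess::Sleep(0.004f); // assumed default ThreadInterval
		}
		return 0;
	}

	virtual void Stop() override { bRun = false; }

private:
	AChingmuMocapReceiverActor* OwnerActor = nullptr;
	FThreadSafeBool bRun = true;
};

// BeginPlay side (member names assumed):
//   ChingmuRunnable = new FChingmuThreadSketch(this);
//   ChingmuRunnableThread = FRunnableThread::Create(ChingmuRunnable, TEXT("FChingmuThread"));
```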
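
The Run() excerpt hands frames to the game thread through PutMocapDataIntoQueue, and Tick() drains FrameQueue into PutMocapDataIntoFrameList. Below is a minimal sketch of that producer/consumer handoff, assuming a TQueue of frame pointers; the fields of the frame struct are assumptions, since ST_MocapFrameData's layout is not shown in the canvas.

```c++
#include "CoreMinimal.h"
#include "Containers/Queue.h"

// Stand-in for ST_MocapFrameData; the real fields are not given in the canvas.
struct FMocapFrameSketch
{
	int32 HumanIndex = 0;   // assumed
	int64 ReceiveTime = 0;  // assumed: unix time (ms) when the frame was captured
	int32 FrameNumber = 0;  // assumed: timecode frame from the Chingmu SDK
};

class FMocapFrameChannelSketch
{
public:
	// Producer side: called on the capture thread, mirroring PutMocapDataIntoQueue.
	void PutMocapDataIntoQueue(FMocapFrameSketch* Frame)
	{
		FrameQueue.Enqueue(Frame);
	}

	// Consumer side: called from Tick() on the game thread, mirroring the
	// "while (!FrameQueue.IsEmpty()) { Dequeue; PutMocapDataIntoFrameList; }" loop.
	template <typename FuncType>
	void Drain_GameThread(FuncType&& PutMocapDataIntoFrameList)
	{
		FMocapFrameSketch* Frame = nullptr;
		while (FrameQueue.Dequeue(Frame))
		{
			PutMocapDataIntoFrameList(Frame);
		}
	}

private:
	// One producer (the capture thread), one consumer (the game thread).
	TQueue<FMocapFrameSketch*, EQueueMode::Spsc> FrameQueue;
};
```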
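
SampleFullBodyData_AnimationThread() is described as sampling CharacterToFrameList into SampledFullBodyData, and the anim node asks for a timestamp slightly in the past (GetUnixTime() - UMotionUtils::BackSampleTime * 2). Here is a minimal sketch of that sampling idea, assuming the frame list is sorted by receive time; the real code may interpolate between neighbouring frames and lock the list, both of which are omitted.

```c++
#include "CoreMinimal.h"
#include "Containers/Array.h"

// Stand-in for FMotionFrameFullBodyData; only the timestamp matters for this sketch.
struct FFullBodyFrameSketch
{
	int64 ReceiveTime = 0; // assumed: unix time in ms
	// ... bone rotations, root transform, etc.
};

// Returns the newest buffered frame at or before SampleTime, i.e. the frame the
// animation thread would evaluate for that slightly delayed sample point.
bool SampleFullBodySketch(const TArray<FFullBodyFrameSketch>& FrameList,
                          int64 SampleTime,
                          FFullBodyFrameSketch& OutSampled)
{
	// FrameList is assumed sorted oldest-to-newest by ReceiveTime.
	for (int32 Index = FrameList.Num() - 1; Index >= 0; --Index)
	{
		if (FrameList[Index].ReceiveTime <= SampleTime)
		{
			OutSampled = FrameList[Index];
			return true;
		}
	}
	return false; // nothing old enough has been buffered yet
}
```

Sampling a couple of BackSampleTime intervals behind real time presumably leaves a small buffer window so late or out-of-order packets can still arrive before the frame is evaluated; the canvas does not state this rationale explicitly.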