
add ROS2 rgb camera plugin #24

Merged
merged 7 commits on Dec 30, 2021
add capture non blocking
yuokamoto committed Dec 28, 2021
commit f7c687fe8a41a08a4684ffa51eca2608c7dd06ec
90 changes: 78 additions & 12 deletions Source/RapyutaSimulationPlugins/Private/Sensors/ROS2Camera.cpp
@@ -59,19 +59,85 @@ void AROS2Camera::MessageUpdate(UROS2GenericMsg *TopicMessage)
FROSImage AROS2Camera::GetData()
{
    SceneCaptureComponent->CaptureScene();
    CaptureNonBlocking();
    if(!RenderRequestQueue.IsEmpty()){
        // Peek the next RenderRequest from queue
        FRenderRequest* nextRenderRequest = nullptr;
        RenderRequestQueue.Peek(nextRenderRequest);

        if(nextRenderRequest){ // nullptr check
            if(nextRenderRequest->RenderFence.IsFenceComplete()){ // Check if rendering is done, indicated by RenderFence
                for (int I = 0; I < nextRenderRequest->Image.Num(); I++)
                {
                    Data.data[I * 3 + 0] = nextRenderRequest->Image[I].R;
                    Data.data[I * 3 + 1] = nextRenderRequest->Image[I].G;
                    Data.data[I * 3 + 2] = nextRenderRequest->Image[I].B;
                    // UE_LOG(LogTemp, Warning, TEXT("AsyncTaskDone"));
                }

                // Delete the first element from RenderQueue
                RenderRequestQueue.Pop();
                delete nextRenderRequest;
            }
        }
    }

    // Previous blocking implementation, kept for reference:
    // SceneCaptureComponent->CaptureScene();
    // FTextureRenderTarget2DResource* RenderTargetResource;
    // RenderTargetResource = (FTextureRenderTarget2DResource*)RenderTarget->GameThread_GetRenderTargetResource();
    // if (RenderTargetResource) {
    //     TArray<FColor> buffer;
    //     RenderTargetResource->ReadPixels(buffer);
    //     for (int I = 0; I < buffer.Num(); I++)
    //     {
    //         Data.data[I * 3 + 0] = buffer[I].R;
    //         Data.data[I * 3 + 1] = buffer[I].G;
    //         Data.data[I * 3 + 2] = buffer[I].B;
    //     }
    // }

    return Data;
}

// reference https://github.com/TimmHess/UnrealImageCapture
void AROS2Camera::CaptureNonBlocking(){

    SceneCaptureComponent->TextureTarget->TargetGamma = GEngine->GetDisplayGamma();

    // Get RenderContext
    FTextureRenderTargetResource* renderTargetResource = SceneCaptureComponent->TextureTarget->GameThread_GetRenderTargetResource();

    struct FReadSurfaceContext{
        FRenderTarget* SrcRenderTarget;
        TArray<FColor>* OutData;
        FIntRect Rect;
        FReadSurfaceDataFlags Flags;
    };

    // Init new RenderRequest
    FRenderRequest* renderRequest = new FRenderRequest();

    // Setup GPU command
    FReadSurfaceContext readSurfaceContext = {
        renderTargetResource,
        &(renderRequest->Image),
        FIntRect(0, 0, renderTargetResource->GetSizeXY().X, renderTargetResource->GetSizeXY().Y),
        FReadSurfaceDataFlags(RCM_UNorm, CubeFace_MAX)
    };

    // Above 4.22 use this
    ENQUEUE_RENDER_COMMAND(SceneDrawCompletion)(
        [readSurfaceContext, this](FRHICommandListImmediate& RHICmdList){
            RHICmdList.ReadSurfaceData(
                readSurfaceContext.SrcRenderTarget->GetRenderTargetTexture(),
                readSurfaceContext.Rect,
                *readSurfaceContext.OutData,
                readSurfaceContext.Flags
            );
        });

    // Notify new task in RenderQueue
    RenderRequestQueue.Enqueue(renderRequest);

    // Set RenderCommandFence
    renderRequest->RenderFence.BeginFence();
}
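Note: the code above follows the queue-plus-fence non-blocking readback pattern from the referenced TimmHess/UnrealImageCapture project. For context, a minimal self-contained sketch of that pattern is shown below; the names FMyRenderRequest, EnqueueReadback, and TryConsumeReadback are illustrative assumptions and are not part of the committed plugin code.

// Minimal sketch of the queue-plus-fence readback pattern (illustrative names; assumes
// the usual engine headers are available through the module's RenderCore/RHI dependencies).
struct FMyRenderRequest
{
    TArray<FColor> Image;        // filled by the render thread
    FRenderCommandFence Fence;   // completed once the readback command has executed
};

// Game thread: enqueue a GPU-to-CPU copy of the render target and fence it.
void EnqueueReadback(FTextureRenderTargetResource* Resource, TQueue<FMyRenderRequest*>& Queue)
{
    FMyRenderRequest* Request = new FMyRenderRequest();
    FIntRect Rect(0, 0, Resource->GetSizeXY().X, Resource->GetSizeXY().Y);

    ENQUEUE_RENDER_COMMAND(ReadSurfaceCommand)(
        [Resource, Request, Rect](FRHICommandListImmediate& RHICmdList)
        {
            RHICmdList.ReadSurfaceData(Resource->GetRenderTargetTexture(), Rect, Request->Image,
                                       FReadSurfaceDataFlags(RCM_UNorm, CubeFace_MAX));
        });

    Queue.Enqueue(Request);
    Request->Fence.BeginFence();   // the game thread can poll IsFenceComplete() on later ticks
}

// Game thread: consume the oldest request only if its fence has completed; never block.
bool TryConsumeReadback(TQueue<FMyRenderRequest*>& Queue, TArray<FColor>& OutPixels)
{
    FMyRenderRequest* Request = nullptr;
    if (Queue.Peek(Request) && Request && Request->Fence.IsFenceComplete())
    {
        OutPixels = MoveTemp(Request->Image);
        Queue.Pop();
        delete Request;
        return true;
    }
    return false;   // not ready yet: keep the request queued and try again later
}

Because GetData() enqueues a fresh request and then consumes the oldest completed one, the published image typically lags the capture by at least one call; the fence is polled rather than waited on, so the game thread never stalls.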
11 changes: 11 additions & 0 deletions Source/RapyutaSimulationPlugins/Public/Sensors/ROS2Camera.h
@@ -15,6 +15,13 @@
/**
*
*/
USTRUCT()
struct FRenderRequest{
    GENERATED_BODY()
    TArray<FColor> Image;
    FRenderCommandFence RenderFence;
};

UCLASS()
class RAPYUTASIMULATIONPLUGINS_API AROS2Camera : public AROS2Node
{
@@ -33,6 +40,10 @@ class RAPYUTASIMULATIONPLUGINS_API AROS2Camera : public AROS2Node
    UFUNCTION()
    void MessageUpdate(UROS2GenericMsg *TopicMessage);

    void CaptureNonBlocking();

    TQueue<FRenderRequest*> RenderRequestQueue;

    FROSImage Data;

public:
@@ -8,7 +8,7 @@ public RapyutaSimulationPlugins(ReadOnlyTargetRules Target) : base(Target)
{
    PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;

    // old:
    // PublicDependencyModuleNames.AddRange(new string[] { "Core", "CoreUObject", "Engine", "InputCore", "AIModule", "rclUE", "PhysicsCore" });
    // new: adds ImageWrapper, RenderCore, Renderer, and RHI, used by the non-blocking render-target readback
    PublicDependencyModuleNames.AddRange(new string[] { "Core", "CoreUObject", "Engine", "InputCore", "ImageWrapper", "RenderCore", "Renderer", "RHI", "AIModule", "rclUE", "PhysicsCore" });

    PrivateDependencyModuleNames.AddRange(new string[] { });
