Problems with rendering a video stream onto a texture/material

Hi together,

I´m pretty new to Unreal. I want to render a RGB video from a Realsense D435 to a texture/material (so I can see it within the game).

So I wrote a plugin (because Intel sadly does not provide one any more) for accessing the realsense api. It works so far as I can now grab images from the camera. But I have problems showing it in the game.

I used some tutorials and questions to get this far. This is what I did:
I created a dynamic material within a blueprint, and I´m trying to update its texture (which is a Param2d defining the base color of the material, called “VideoTexture”) every tick with a fresh grabbed frame.

Here is my cpp file (I tried the build-in UpdateTextureRegions() as well as the older one using an older tutorial):

// Sets default values for this component's properties
ARealSenseComponent::ARealSenseComponent()
{

}




void ARealSenseComponent::CreateUpdateableTexture(int width, int height)
{
	TextureFromVideo = UTexture2D::CreateTransient(width, height);
	TextureFromVideo->AddToRoot();
	TextureFromVideo->UpdateResource();
	textureVideoRegion = new FUpdateTextureRegion2D(0, 0, 0, 0, width, height);

}

UTexture2D * ARealSenseComponent::ReceiveRGBFrame()
{
	if (cameraWorks) {
		TextureFromVideo->UpdateResource();
		if (!receiveFrame()) {
			UE_LOG(RealSenseLog, Error, TEXT("Could not receive a Frame from camera!"));
		}
		else {
			UE_LOG(RealSenseLog, Log, TEXT("Received Frame!"))
		}
	}
	else {
		UE_LOG(RealSenseLog, Log, TEXT("CameraWorks is false"));
	}
	return TextureFromVideo;
}

// Called when the game starts
void ARealSenseComponent::BeginPlay()
{
	Super::BeginPlay();

	if (IRealSenseModule::IsAvailable()) {
		int x = IRealSenseModule::Get().CheckRealsenseCamera();
		UE_LOG(RealSenseLog, Log, TEXT("CheckRealsenseCamera returned: %d"), x);
	}
	else {
		UE_LOG(RealSenseLog, Warning, TEXT("RealSenseModule not available"));
	}


	try {

		pipeline = new rs2::pipeline();
		pipeline->start();
		cameraWorks = true;
	}
	catch (std::exception e) {
		UE_LOG(RealSenseLog, Error, TEXT("Realsense initialization error: %s"), e.what());
	}

	try {
		rs2::frameset frames = pipeline->wait_for_frames();
		rs2::video_frame colorFrame = frames.get_color_frame();
		int width = colorFrame.get_width();
		int height = colorFrame.get_height();
		CreateUpdateableTexture(width, height);

	}
	catch (std::exception e) {
		UE_LOG(RealSenseLog, Error, TEXT("Realsense video stream error: %s"), e.what());
		cameraWorks = false;
	}
}

bool ARealSenseComponent::receiveFrame()
{
	try {
		rs2::frameset frames = pipeline->wait_for_frames();
		rs2::video_frame colorFrame = frames.get_color_frame();
		uint8* data = (uint8*)(colorFrame.get_data());
		int height = colorFrame.get_height();
		int width = colorFrame.get_width();
		int channels = colorFrame.get_bytes_per_pixel();
		int bits = colorFrame.get_bits_per_pixel();
		rs2_format format = colorFrame.get_profile().format();
		int end = width * height * channels;

		UE_LOG(RealSenseLog, Log, TEXT("Image Received. Resolution: %d/%d, Channels: %d, Format: %s"), width, height, channels, *FString(rs2_format_to_string(format)));
		UE_LOG(RealSenseLog, Log, TEXT("First/Last pixel: (%d/%d/%d), (%d/%d/%d)"), data[0], data[1], data[2], data[end - 3], data[end - 2], data[end - 1]);

		// tried differend updating methods
		TextureFromVideo->UpdateTextureRegions(DBL_MAX_10_EXP, 1, textureVideoRegion, static_cast<uint32>(width * bits),
			bits, data, texCleanUpFP);

		//UpdateTextureRegions(TextureFromVideo, DBL_MAX_10_EXP, 1, textureVideoRegion, static_cast<uint32>(width * bits), bits, data, false);
	}
	catch (const rs2::error & e) {

		UE_LOG(RealSenseLog, Error, TEXT("%s"), *FString(e.what()));
		cameraWorks = false;
	}
	catch (const std::exception& e) {

		UE_LOG(RealSenseLog, Error, TEXT("%s"), *FString(e.what()));
		cameraWorks = false;
	}

	return cameraWorks;
}


// Called every frame
void ARealSenseComponent::Tick(float DeltaTime)
{
	//TextureFromVideo->UpdateResource();
	Super::Tick(DeltaTime);

}

void ARealSenseComponent::UpdateTextureRegions(UTexture2D* Texture, int32 MipIndex, uint32 NumRegions, FUpdateTextureRegion2D* Regions, uint32 SrcPitch, uint32 SrcBpp, uint8* SrcData, bool bFreeData)
{
	if (Texture && Texture->Resource)
	{
		struct FUpdateTextureRegionsData
		{
			FTexture2DResource* Texture2DResource;
			int32 MipIndex;
			uint32 NumRegions;
			FUpdateTextureRegion2D* Regions;
			uint32 SrcPitch;
			uint32 SrcBpp;
			uint8* SrcData;
		};

		FUpdateTextureRegionsData* RegionData = new FUpdateTextureRegionsData;

		RegionData->Texture2DResource = (FTexture2DResource*)Texture->Resource;
		RegionData->MipIndex = MipIndex;
		RegionData->NumRegions = NumRegions;
		RegionData->Regions = Regions;
		RegionData->SrcPitch = SrcPitch;
		RegionData->SrcBpp = SrcBpp;
		RegionData->SrcData = SrcData;

		ENQUEUE_UNIQUE_RENDER_COMMAND_TWOPARAMETER(
			UpdateTextureRegionsData,
			FUpdateTextureRegionsData*, RegionData, RegionData,
			bool, bFreeData, bFreeData,
			{
				for (uint32 RegionIndex = 0; RegionIndex < RegionData->NumRegions; ++RegionIndex)
				{
					int32 CurrentFirstMip = RegionData->Texture2DResource->GetCurrentFirstMip();
					if (RegionData->MipIndex >= CurrentFirstMip)
					{
						RHIUpdateTexture2D(
							RegionData->Texture2DResource->GetTexture2DRHI(),
							RegionData->MipIndex - CurrentFirstMip,
							RegionData->Regions[RegionIndex],
							RegionData->SrcPitch,
							RegionData->SrcData
							+ RegionData->Regions[RegionIndex].SrcY * RegionData->SrcPitch
							+ RegionData->Regions[RegionIndex].SrcX * RegionData->SrcBpp
						);
					}
				}
		if (bFreeData)
		{
			FMemory::Free(RegionData->Regions);
			FMemory::Free(RegionData->SrcData);
		}
		delete RegionData;
			});
	}
}

So as result, I can see in the log that new frames are comming. But the texture does not get updated, I can´t see the video ingame:

Any suggestions?

Ah sorry here is the header file:

#pragma once

#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include <Engine/Texture2D.h>
#include <rs.hpp>
#include "RealSenseComponent.generated.h"



UCLASS( ClassGroup=(Custom))
class REALSENSEPLUGIN_API ARealSenseComponent : public AActor
{
	GENERATED_BODY()

public:	
	// Sets default values for this component's properties
	ARealSenseComponent();

	UFUNCTION(BlueprintCallable, Category = "RealSense")
		void CreateUpdateableTexture(int img_width, int img_height);

	UFUNCTION(BlueprintCallable, Category = "RealSense")
		UTexture2D* ReceiveRGBFrame();

protected:
	// Called when the game starts
	virtual void BeginPlay() override;

	/** Reveice a frame from realsense device and update texture*/
	bool receiveFrame();

	// texture to update
	UTexture2D* TextureFromVideo = nullptr;
	FUpdateTextureRegion2D* textureVideoRegion = nullptr;

	// Realsense device pipeline
	rs2::pipeline* pipeline = nullptr;

	/** Update texture region from https://wiki.unrealengine.com/Dynamic_Textures */
	void UpdateTextureRegions(UTexture2D* Texture, int32 MipIndex, uint32 NumRegions, FUpdateTextureRegion2D* Regions, uint32 SrcPitch, uint32 SrcBpp, uint8* SrcData, bool bFreeData);

	// from SRWORKS plugin
	void TextureRegionCleanUp(uint8* rawData, const FUpdateTextureRegion2D* region) {};
	TFunction<void(uint8*, const FUpdateTextureRegion2D*)> texCleanUpFP = [this](uint8* rawData, const FUpdateTextureRegion2D* region) { TextureRegionCleanUp(rawData, region); };


public:	
	// Called every frame
	virtual void Tick(float DeltaTime) override;

	bool cameraWorks = false;
	
};

Hello,

how did you set up your material? And if you debug the event tick, what is passed to the value?

Hi, this is my Material right now (Because I found a tutorial for streaming a SceneCapture2D onto a Material, they did set it up like this):

But I also tried it with a “Default Lit” Shading Model, and with the Texture being either attached to the base color or to the emissive color (like above). None of it changed anything

So, I finally found the Problem.
It was a dump mixture of copy-paste-error and misunderstanding.
The problem is the line

TextureFromVideo->UpdateTextureRegions(DBL_MAX_10_EXP, 1, textureVideoRegion, static_cast<uint32>(width * bits), bits, data, texCleanUpFP);

Which I copied from the SRWorks SDK:

 textureDistortedLeft->UpdateTextureRegions(0, 1, textureDistortedRegion, static_cast<uint32>(DistortedImageWidth * sizeof(uint8) * DistortedImageChannel), sizeof(uint8) * DistortedImageChannel, RawDistortedLeftBGRA, texCleanUpFP);

So somewhere in the copying process, the first parameter somehow became DBL_MAX_10_EXP (Still don´t know how the ■■■■ that could happen), and I got confused by the complete unnessecary sizeof(uint8) (which is 1, obviously, but I forgot that and thought it was the count of bits in one byte).

So in the end this is the solution:

int bytes = colorFrame.get_bytes_per_pixel();

TextureFromVideo->UpdateTextureRegions(0, 1, textureVideoRegion, static_cast<uint32>(width * bytes), bytes, data, texCleanUpFP);

So if anyone is interested in one or another thing I was doing here, you can take a look at the github repository. =)