adding Azure Kinect

This commit is contained in:
davoudn 2024-03-06 19:01:40 +03:30
parent 055a0f56bc
commit f0051cb9e1
36 changed files with 1864 additions and 0 deletions

View File

@ -0,0 +1,30 @@
{
"FileVersion": 3,
"Version": 1,
"VersionName": "1.0",
"FriendlyName": "Azure Kinect",
"Description": "Exposes Azure Kinect Support for integration into Unreal Engine Applications",
"Category": "Virtual Production",
"CreatedBy": "Ayumu Nagamatsu",
"CreatedByURL": "https://ayumu-nagamatsu.com/",
"DocsURL": "https://github.com/nama-gatsuo/AzureKinectForUE",
"MarketplaceURL": "",
"SupportURL": "",
"CanContainContent": true,
"IsBetaVersion": true,
"IsExperimentalVersion": false,
"Installed": false,
"SupportedTargetPlatforms": [ "Win64" ],
"Modules": [
{
"Name": "AzureKinect",
"Type": "Runtime",
"LoadingPhase": "Default"
},
{
"Name": "AzureKinectEditor",
"Type": "UncookedOnly",
"LoadingPhase": "Default"
}
]
}

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d7adfa56309ddbd14463c97c015e58c4a2953dc57e9fbb7beed5db4fdef86426
size 136155

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6b12dc821ef481cea7586e9cc31960fc947e84ab967551a6871c00141ec381b0
size 2311

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2cd2e7ed9b61b4489ea1fb518edfa8e0b64a11f4bbca27c368f87a77e905ce1b
size 2468

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e03af29176162d5e48c8ec704503c81fd96ed5903aea6c471d79c0963ff76a63
size 106017

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e050b002ea88e696ae88ef5b0372c76a1624569a0cd552a079329f5d02254340
size 709924

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1008a02b1d51eb73a6097c7f5d8329c7afb401a8247bf82575bb086246eef95
size 5700

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa51b6e5323bd5f1f91640563548e29b81c690482a2519daa362651d9afa99c2
size 3794

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:19e600d2b05ab814c1989f619b38759c9b800fec292e1c36bfe8b1ce1a6d3250
size 3794

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb306826af33d560fbd9b84552597c83af8efc973a5e0b6e3759697b174155c0
size 3782

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7234e5624e9a5d214d61ce51bb334b3bce5823bab4abffe59a5201c45f57a6b
size 4458917

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c0cadc057782c801db4d2aa69e4ccb81748dc3cad261def1e452f30ad90e3d90
size 97799

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6a306ec8db21ece7bd831890e9aacb2201bce4e07f00e064088930486a5565f3
size 119202

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:efd5130bf40d083e1bfb8752f7ae6f5e15452522a5754725ac544f12686daf0c
size 648981

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7aed07f93d3baef8a82110f48e6839462ed3efa41ed88b42caf08484f5ad0352
size 426986

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ac426f44dcf2daba2aee446cd378199fd593e5ccb7505658260c054ef313166
size 4677882

View File

@ -0,0 +1,88 @@
# Azure Kinect for Unreal Engine
![](./Docs/kinect.png)
Exposes Azure Kinect Support for integration into Unreal Engine Applications.
Mainly for creating depth and color textures from the Kinect's raw feed.
## Prerequisites
* Platform: Win64
* Dependencies:
* `Azure Kinect SDK v1.4.1` is installed
* Download from [here](https://github.com/microsoft/Azure-Kinect-Sensor-SDK/blob/develop/docs/usage.md)
* An environment variable `AZUREKINECT_SDK` that points to the Azure Kinect SDK root path must be registered.
* `Azure Kinect Body Tracking SDK v1.1.0` is installed
* Download from [here](https://docs.microsoft.com/en-us/azure/Kinect-dk/body-sdk-download)
* An environment variable `AZUREKINECT_BODY_SDK` that points to the Azure Kinect Body Tracking SDK root path must be registered.
* This plugin can be neither built nor opened without the SDKs and environment variables above.
* [AzureKinect.Build.cs](https://github.com/nama-gatsuo/AzureKinectForUE/blob/master/Source/AzureKinect/AzureKinect.Build.cs) describes how it resolves dependent paths.
* Unreal Engine 4.27 or later
    * Only tested with 4.27; may also work with earlier versions.
## Features
### In-Editor activation
* Writes Depth / Color buffers into `RenderTarget2D`s.
![](./Docs/in-editor.gif)
### Blueprint activation
![](./Docs/bp.png)
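The same setup can also be driven from C++. Below is a minimal sketch (not part of the plugin) using the public `UAzureKinectDevice` API shown in the Blueprint above; the function name and render-target parameters are placeholders for your own project.
```
#include "AzureKinectDevice.h"
#include "Engine/TextureRenderTarget2D.h"

// Hypothetical helper: configure and start a device, writing into the given render targets.
void StartKinectCapture(UTextureRenderTarget2D* ColorRT, UTextureRenderTarget2D* DepthRT)
{
	// A transient device object; an Azure Kinect Device asset created in the
	// Content Browser works the same way. Keep a UPROPERTY reference to it so
	// it is not garbage collected.
	UAzureKinectDevice* Device = NewObject<UAzureKinectDevice>();
	Device->DeviceIndex = 0;                                      // first connected Kinect
	Device->ColorTexture = ColorRT;                               // written by the capture thread
	Device->DepthTexture = DepthRT;
	Device->ColorMode = EKinectColorResolution::RESOLUTION_720P;
	Device->DepthMode = EKinectDepthMode::NFOV_UNBINNED;
	Device->Fps = EKinectFps::PER_SECOND_30;

	if (Device->StartDevice())                                    // opens the camera and spawns the capture thread
	{
		// ... later, when finished:
		// Device->StopDevice();
	}
}
```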
### Niagara Particle
* You can modify the base Niagara system `NS_KinectParticle`.
![](./Docs/animation.gif)
### Skeleton tracking
![](./Docs/skeletonAnim.gif)
* Bone mapping node in Anim Graph
![](./Docs/animgraph.jpg)
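Besides the Anim Graph node, tracked skeletons can be polled directly through the Blueprint-callable API (`GetNumTrackedSkeletons` / `GetSkeleton`). A rough C++ sketch, assuming a device that was already started with Skeleton Tracking enabled:
```
#include "AzureKinectDevice.h"

// Hypothetical helper: log the head position of every tracked body (e.g. from an actor Tick).
void LogHeadPositions(UAzureKinectDevice* Device)
{
	if (!Device || !Device->IsOpen())
	{
		return;
	}
	const int32 NumBodies = Device->GetNumTrackedSkeletons();
	for (int32 i = 0; i < NumBodies; i++)
	{
		const FAzureKinectSkeleton Skeleton = Device->GetSkeleton(i);
		const int32 HeadIndex = static_cast<int32>(EKinectBodyJoint::HEAD);
		if (Skeleton.Joints.IsValidIndex(HeadIndex))
		{
			// Joint transforms are already converted to Unreal's coordinate system (cm).
			const FVector HeadLocation = Skeleton.Joints[HeadIndex].GetLocation();
			UE_LOG(LogTemp, Log, TEXT("Body %d head at %s"), Skeleton.ID, *HeadLocation.ToString());
		}
	}
}
```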
## Notice
Depth data are stored in the `RenderTarget2D` as a standard 8-bit RGBA texture:
* R: low 8 bits (as `uint8`) of the original `uint16` sample
* G: high 8 bits (as `uint8`) of the original `uint16` sample
* B: `0x00`, or `0xFF` if the depth sample is invalid
* A: `0xFF` (constant value)
Thus a conversion is needed to recover the original depth samples.
```
// In MaterialEditor or Niagara, sample values in Depth texture are normalized to 0-1.
float DepthSample = (G * 256.0 + R) * 256.0; // millimeters
```
```
// In C++
uint8 R = Sample.R, G = Sample.G;
uint16 DepthSample = G << 8 | R; // millimeters
```
Each depth pixel from the Azure Kinect SDK is originally a single `uint16` in millimeters, but a `RenderTarget2D` can't store `uint16` textures (`EPixelFormat::PF_R16_UINT` doesn't work for render targets).
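As a rough illustration of the encoding (not part of the plugin), the original samples can be reconstructed on the CPU by reading the depth `RenderTarget2D` back and recombining the R/G bytes. Note that a game-thread readback like this stalls rendering and is only meant as an example:
```
#include "Engine/TextureRenderTarget2D.h"
#include "TextureResource.h"

// Hypothetical helper: read back the depth render target and rebuild uint16 millimeter samples.
TArray<uint16> ReadDepthMillimeters(UTextureRenderTarget2D* DepthTexture)
{
	TArray<uint16> DepthMm;
	FTextureRenderTargetResource* RTResource = DepthTexture->GameThread_GetRenderTargetResource();
	TArray<FColor> Pixels;
	if (RTResource && RTResource->ReadPixels(Pixels))
	{
		DepthMm.Reserve(Pixels.Num());
		for (const FColor& Pixel : Pixels)
		{
			// R holds the low byte and G the high byte of the original uint16 sample;
			// B is 0xFF where the depth sample was invalid (value 0).
			DepthMm.Add((static_cast<uint16>(Pixel.G) << 8) | Pixel.R);
		}
	}
	return DepthMm;
}
```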
# Reference
Existing plugin for Azure Kinect:
* [secretlocation/azure-kinect-unreal](https://github.com/secretlocation/azure-kinect-unreal/)
* Body tracking only; does not support point cloud or textures
* This plugin borrows heavily from that repo
# License
## MIT License
Copyright 2021 Ayumu Nagamatsu
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,64 @@
// Copyright Epic Games, Inc. All Rights Reserved.
using System.IO;
using UnrealBuildTool;
public class AzureKinect : ModuleRules
{
public AzureKinect(ReadOnlyTargetRules Target) : base(Target)
{
PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs;
if (Target.Platform == UnrealTargetPlatform.Win64)
{
string sdkPath = System.Environment.GetEnvironmentVariable("AZUREKINECT_SDK");
string bodySdkPath = System.Environment.GetEnvironmentVariable("AZUREKINECT_BODY_SDK");
PublicIncludePaths.AddRange(
new string[] {
Path.Combine(sdkPath, "sdk", "include"),
Path.Combine(bodySdkPath, "sdk", "include")
});
PublicAdditionalLibraries.AddRange(
new string[] {
Path.Combine(sdkPath, "sdk", "windows-desktop", "amd64", "release", "lib", "k4a.lib"),
Path.Combine(sdkPath, "sdk", "windows-desktop", "amd64", "release", "lib", "k4arecord.lib"),
Path.Combine(bodySdkPath, "sdk", "windows-desktop", "amd64", "release", "lib", "k4abt.lib")
});
string depthEngineDllPath = Path.Combine(sdkPath, "sdk", "windows-desktop", "amd64", "release", "bin", "depthengine_2_0.dll");
string k4aDllPath = Path.Combine(sdkPath, "sdk", "windows-desktop", "amd64", "release", "bin", "k4a.dll");
string k4abtDllPath = Path.Combine(bodySdkPath, "sdk", "windows-desktop", "amd64", "release", "bin", "k4abt.dll");
PublicDelayLoadDLLs.AddRange(
new string[] {
depthEngineDllPath,
k4aDllPath,
k4abtDllPath,
});
RuntimeDependencies.Add(depthEngineDllPath);
RuntimeDependencies.Add(k4aDllPath);
RuntimeDependencies.Add(k4abtDllPath);
}
PrivateIncludePaths.AddRange(
new string[]
{
"AzureKinect/Private",
});
PrivateDependencyModuleNames.AddRange(
new string[]
{
"Core",
"CoreUObject",
"Engine",
"RenderCore",
"RHI",
"AnimGraphRuntime",
});
}
}

View File

@ -0,0 +1,61 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AnimNode_AzureKinectPose.h"
#include "Animation/AnimInstanceProxy.h"
#include "AnimationRuntime.h"
#include "k4abttypes.h"
DEFINE_LOG_CATEGORY(AzureKinectAnimNodeLog);
FAnimNode_AzureKinectPose::FAnimNode_AzureKinectPose()
{
BonesToModify.Reserve(K4ABT_JOINT_COUNT);
for (int i = 0; i < K4ABT_JOINT_COUNT; i++)
{
BonesToModify.Add(static_cast<EKinectBodyJoint>(i), FBoneReference());
}
}
void FAnimNode_AzureKinectPose::Update_AnyThread(const FAnimationUpdateContext& Context)
{
DECLARE_SCOPE_HIERARCHICAL_COUNTER_ANIMNODE(Update_AnyThread);
GetEvaluateGraphExposedInputs().Execute(Context);
USkeletalMeshComponent* SkelMesh = Context.AnimInstanceProxy->GetSkelMeshComponent();
BoneTransforms.Reset(K4ABT_JOINT_COUNT);
for (int i = 0; i < Skeleton.Joints.Num(); i++)
{
EKinectBodyJoint JointIndex = static_cast<EKinectBodyJoint>(i);
if (BonesToModify.Contains(JointIndex))
{
int32 BoneIndex = SkelMesh->GetBoneIndex(BonesToModify[JointIndex].BoneName);
if (BoneIndex != INDEX_NONE)
{
FCompactPoseBoneIndex CompactBoneIndex(BoneIndex);
BoneTransforms.Emplace(CompactBoneIndex, Skeleton.Joints[i]);
}
}
}
}
void FAnimNode_AzureKinectPose::EvaluateComponentSpace_AnyThread(FComponentSpacePoseContext& Output)
{
DECLARE_SCOPE_HIERARCHICAL_COUNTER_ANIMNODE(EvaluateComponentSpace_AnyThread)
Output.ResetToRefPose();
for (const FBoneTransform& BoneTransform : BoneTransforms)
{
FTransform Transform = Output.Pose.GetComponentSpaceTransform(BoneTransform.BoneIndex);
Transform.SetRotation(BoneTransform.Transform.Rotator().Quaternion());
Output.Pose.SetComponentSpaceTransform(BoneTransform.BoneIndex, Transform);
}
}

View File

@ -0,0 +1,658 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AzureKinectDevice.h"
#include "Runtime/RHI/Public/RHI.h"
DEFINE_LOG_CATEGORY(AzureKinectDeviceLog);
UAzureKinectDevice::UAzureKinectDevice() :
NativeDevice(nullptr),
Thread(nullptr),
DeviceIndex(-1),
bOpen(false),
NumTrackedSkeletons(0),
DepthMode(EKinectDepthMode::NFOV_2X2BINNED),
ColorMode(EKinectColorResolution::RESOLUTION_720P),
Fps(EKinectFps::PER_SECOND_30),
SensorOrientation(EKinectSensorOrientation::DEFAULT),
bSkeletonTracking(false)
{
LoadDevices();
}
UAzureKinectDevice::UAzureKinectDevice(const FObjectInitializer& ObjectInitializer) :
Super(ObjectInitializer)
{
LoadDevices();
}
void UAzureKinectDevice::LoadDevices()
{
int32 NumKinect = GetNumConnectedDevices();
DeviceList.Empty(NumKinect + 1);
DeviceList.Add(MakeShared<FString>("No Device"));
if (NumKinect > 0)
{
for (int32 i = 0; i < NumKinect; i++)
{
try
{
// Open connection to the device.
k4a::device Device = k4a::device::open(i);
// Get and store the device serial number
DeviceList.Add(MakeShared<FString>(Device.get_serialnum().c_str()));
Device.close();
}
catch (const k4a::error& Err)
{
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Can't load: %s"), TCHAR_TO_UTF8(ANSI_TO_TCHAR(Err.what())));
}
}
}
}
bool UAzureKinectDevice::StartDevice()
{
if (bOpen)
{
UE_LOG(AzureKinectDeviceLog, Warning, TEXT("This Device has been open."));
return false;
}
if (DeviceIndex == -1)
{
UE_LOG(AzureKinectDeviceLog, Warning, TEXT("No Device is selected."));
return false;
}
CalcFrameCount();
try
{
// Open connection to the device.
NativeDevice = k4a::device::open(DeviceIndex);
// Start the Camera and make sure the Depth Camera is Enabled
k4a_device_configuration_t DeviceConfig = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
DeviceConfig.depth_mode = static_cast<k4a_depth_mode_t>(DepthMode);
DeviceConfig.color_resolution = static_cast<k4a_color_resolution_t>(ColorMode);
DeviceConfig.camera_fps = static_cast<k4a_fps_t>(Fps);
DeviceConfig.color_format = k4a_image_format_t::K4A_IMAGE_FORMAT_COLOR_BGRA32;
DeviceConfig.synchronized_images_only = true;
DeviceConfig.wired_sync_mode = K4A_WIRED_SYNC_MODE_STANDALONE;
NativeDevice.start_cameras(&DeviceConfig);
KinectCalibration = NativeDevice.get_calibration(DeviceConfig.depth_mode, DeviceConfig.color_resolution);
KinectTransformation = k4a::transformation(KinectCalibration);
if (bSkeletonTracking)
{
k4abt_tracker_configuration_t TrackerConfig = K4ABT_TRACKER_CONFIG_DEFAULT;
TrackerConfig.sensor_orientation = static_cast<k4abt_sensor_orientation_t>(SensorOrientation);
// Retain body tracker
BodyTracker = k4abt::tracker::create(KinectCalibration, TrackerConfig);
}
}
catch (const k4a::error& Err)
{
if (NativeDevice)
{
NativeDevice.close();
}
FString Msg(ANSI_TO_TCHAR(Err.what()));
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Cant't open: %s"), *Msg);
return false;
}
Thread = new FAzureKinectDeviceThread(this);
bOpen = true;
return true;
}
bool UAzureKinectDevice::StopDevice()
{
if (!bOpen)
{
UE_LOG(AzureKinectDeviceLog, Warning, TEXT("KinectDevice is not running."));
return false;
}
if (Thread)
{
Thread->EnsureCompletion();
Thread = nullptr;
}
if (BodyTracker)
{
BodyTracker.shutdown();
BodyTracker.destroy();
BodyTracker = nullptr;
}
if (RemapImage)
{
RemapImage.reset();
}
if (NativeDevice)
{
NativeDevice.stop_cameras();
NativeDevice.close();
NativeDevice = nullptr;
UE_LOG(AzureKinectDeviceLog, Verbose, TEXT("KinectDevice Camera is Stopped and Closed."));
}
bOpen = false;
return true;
}
int32 UAzureKinectDevice::GetNumConnectedDevices()
{
return k4a_device_get_installed_count();
}
int32 UAzureKinectDevice::GetNumTrackedSkeletons() const
{
if (!bOpen)
{
return 0;
}
if (!bSkeletonTracking)
{
UE_LOG(AzureKinectDeviceLog, Error, TEXT("GetNumTrackedBodies: Skeleton Tracking is disabled!"));
return 0;
}
FScopeLock Lock(Thread->GetCriticalSection());
return NumTrackedSkeletons;
}
FAzureKinectSkeleton UAzureKinectDevice::GetSkeleton(int32 Index) const
{
if (bOpen)
{
if (!bSkeletonTracking)
{
UE_LOG(AzureKinectDeviceLog, Error, TEXT("GetSkeleton: Skeleton Tracking is disabled!"));
return FAzureKinectSkeleton();
}
FScopeLock Lock(Thread->GetCriticalSection());
if (Skeletons.IsValidIndex(Index))
{
return Skeletons[Index];
}
else
{
UE_LOG(AzureKinectDeviceLog, Error, TEXT("GetSkeleton: Index is out of range!"));
return FAzureKinectSkeleton();
}
}
else
{
return FAzureKinectSkeleton();
}
}
const TArray<FAzureKinectSkeleton>& UAzureKinectDevice::GetSkeletons() const {
if (bOpen)
{
FScopeLock Lock(Thread->GetCriticalSection());
return Skeletons;
}
else
{
return Skeletons;
}
}
void UAzureKinectDevice::UpdateAsync()
{
// Threaded function
try
{
if (!NativeDevice.get_capture(&Capture, FrameTime))
{
UE_LOG(AzureKinectDeviceLog, Verbose, TEXT("Timed out waiting for capture."));
}
}
catch (const k4a::error& Err)
{
FString Msg(ANSI_TO_TCHAR(Err.what()));
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Can't capture frame: %s"), *Msg);
return;
}
if (ColorMode != EKinectColorResolution::RESOLUTION_OFF && ColorTexture)
{
CaptureColorImage();
}
if (DepthMode != EKinectDepthMode::OFF && DepthTexture)
{
CaptureDepthImage();
}
if (DepthMode != EKinectDepthMode::OFF && InflaredTexture)
{
CaptureInflaredImage();
}
if (bSkeletonTracking && BodyTracker)
{
UpdateSkeletons();
}
Capture.reset();
}
void UAzureKinectDevice::CaptureColorImage()
{
int32 Width = 0, Height = 0;
uint8* SourceBuffer;
if (RemapMode == EKinectRemap::COLOR_TO_DEPTH)
{
k4a::image DepthCapture = Capture.get_depth_image();
k4a::image ColorCapture = Capture.get_color_image();
if (!DepthCapture.is_valid() || !ColorCapture.is_valid()) return;
Width = DepthCapture.get_width_pixels();
Height = DepthCapture.get_height_pixels();
if (Width == 0 || Height == 0) return;
//
if (!RemapImage || !RemapImage.is_valid())
{
RemapImage = k4a::image::create(K4A_IMAGE_FORMAT_COLOR_BGRA32, Width, Height, Width * static_cast<int>(sizeof(uint8) * 4));
}
try
{
KinectTransformation.color_image_to_depth_camera(DepthCapture, ColorCapture, &RemapImage);
}
catch (const k4a::error& Err)
{
FString Msg(ANSI_TO_TCHAR(Err.what()));
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Cant't transform Color to Depth: %s"), *Msg);
return;
}
SourceBuffer = RemapImage.get_buffer();
DepthCapture.reset();
ColorCapture.reset();
}
else
{
k4a::image ColorCapture = Capture.get_color_image();
if (!ColorCapture.is_valid()) return;
Width = ColorCapture.get_width_pixels();
Height = ColorCapture.get_height_pixels();
if (Width == 0 || Height == 0) return;
SourceBuffer = ColorCapture.get_buffer();
ColorCapture.reset();
}
if (ColorTexture->GetSurfaceWidth() != Width || ColorTexture->GetSurfaceHeight() != Height)
{
ColorTexture->InitCustomFormat(Width, Height, EPixelFormat::PF_B8G8R8A8, false);
ColorTexture->RenderTargetFormat = ETextureRenderTargetFormat::RTF_RGBA8;
ColorTexture->UpdateResource();
}
else
{
FTextureResource* TextureResource = ColorTexture->Resource;
auto Region = FUpdateTextureRegion2D(0, 0, 0, 0, Width, Height);
ENQUEUE_RENDER_COMMAND(UpdateTextureData)(
[TextureResource, Region, SourceBuffer](FRHICommandListImmediate& RHICmdList) {
FTexture2DRHIRef Texture2D = TextureResource->TextureRHI ? TextureResource->TextureRHI->GetTexture2D() : nullptr;
if (!Texture2D)
{
return;
}
RHIUpdateTexture2D(Texture2D, 0, Region, 4 * Region.Width, SourceBuffer);
});
}
}
void UAzureKinectDevice::CaptureDepthImage()
{
int32 Width = 0, Height = 0;
uint8* SourceBuffer;
if (RemapMode == EKinectRemap::DEPTH_TO_COLOR)
{
k4a::image DepthCapture = Capture.get_depth_image();
k4a::image ColorCapture = Capture.get_color_image();
if (!DepthCapture.is_valid() || !ColorCapture.is_valid()) return;
Width = ColorCapture.get_width_pixels();
Height = ColorCapture.get_height_pixels();
if (Width == 0 || Height == 0) return;
//
if (!RemapImage || !RemapImage.is_valid())
{
RemapImage = k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16, Width, Height, Width * static_cast<int>(sizeof(uint16)));
}
try
{
KinectTransformation.depth_image_to_color_camera(DepthCapture, &RemapImage);
}
catch (const k4a::error& Err)
{
FString Msg(ANSI_TO_TCHAR(Err.what()));
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Cant't transform Depth to Color: %s"), *Msg);
return;
}
SourceBuffer = RemapImage.get_buffer();
DepthCapture.reset();
ColorCapture.reset();
}
else
{
k4a::image DepthCapture = Capture.get_depth_image();
if (!DepthCapture.is_valid()) return;
Width = DepthCapture.get_width_pixels();
Height = DepthCapture.get_height_pixels();
if (Width == 0 || Height == 0) return;
SourceBuffer = DepthCapture.get_buffer();
DepthCapture.reset();
}
if (DepthTexture->GetSurfaceWidth() != Width || DepthTexture->GetSurfaceHeight() != Height)
{
DepthTexture->InitCustomFormat(Width, Height, EPixelFormat::PF_R8G8B8A8, true);
DepthTexture->RenderTargetFormat = ETextureRenderTargetFormat::RTF_RGBA8;
DepthTexture->UpdateResource();
}
else
{
TArray<uint8> SrcData;
SrcData.Reset(Width * Height * 4);
for (int hi = 0; hi < Height; hi++)
{
for (int wi = 0; wi < Width; wi++)
{
int index = hi * Width + wi;
uint16 R = SourceBuffer[index * 2];
uint16 G = SourceBuffer[index * 2 + 1];
uint16 Sample = G << 8 | R;
SrcData.Push(SourceBuffer[index * 2]);
SrcData.Push(SourceBuffer[index * 2 + 1]);
SrcData.Push(Sample > 0 ? 0x00 : 0xFF);
SrcData.Push(0xFF);
}
}
FTextureResource* TextureResource = DepthTexture->Resource;
auto Region = FUpdateTextureRegion2D(0, 0, 0, 0, Width, Height);
ENQUEUE_RENDER_COMMAND(UpdateTextureData)(
[TextureResource, Region, SrcData](FRHICommandListImmediate& RHICmdList) {
FTexture2DRHIRef Texture2D = TextureResource->TextureRHI ? TextureResource->TextureRHI->GetTexture2D() : nullptr;
if (!Texture2D)
{
return;
}
RHIUpdateTexture2D(Texture2D, 0, Region, 4 * Region.Width, SrcData.GetData());
});
}
}
void UAzureKinectDevice::CaptureInflaredImage()
{
const k4a::image& InflaredCapture = Capture.get_ir_image();
if (!InflaredCapture.is_valid()) return;
int32 Width = InflaredCapture.get_width_pixels(), Height = InflaredCapture.get_height_pixels();
if (Width == 0 || Height == 0) return;
if (InflaredTexture->GetSurfaceWidth() != Width || InflaredTexture->GetSurfaceHeight() != Height)
{
InflaredTexture->InitCustomFormat(Width, Height, EPixelFormat::PF_R8G8B8A8, true);
InflaredTexture->RenderTargetFormat = ETextureRenderTargetFormat::RTF_RGBA8;
InflaredTexture->UpdateResource();
}
else
{
const uint8* S = InflaredCapture.get_buffer();
TArray<uint8> SrcData;
SrcData.Reset(Width * Height * 4);
for (int hi = 0; hi < Height; hi++)
{
for (int wi = 0; wi < Width; wi++)
{
int index = hi * Width + wi;
if (S[index * 2] + S[index * 2 + 1] > 0)
{
SrcData.Push(S[index * 2]);
SrcData.Push(S[index * 2 + 1]);
SrcData.Push(0x00);
SrcData.Push(0xff);
}
else
{
SrcData.Push(0x00);
SrcData.Push(0x00);
SrcData.Push(0xff);
SrcData.Push(0xff);
}
}
}
FTextureResource* TextureResource = InflaredTexture->Resource;
auto Region = FUpdateTextureRegion2D(0, 0, 0, 0, Width, Height);
ENQUEUE_RENDER_COMMAND(UpdateTextureData)(
[TextureResource, Region, SrcData](FRHICommandListImmediate& RHICmdList) {
FTexture2DRHIRef Texture2D = TextureResource->TextureRHI ? TextureResource->TextureRHI->GetTexture2D() : nullptr;
if (!Texture2D)
{
return;
}
RHIUpdateTexture2D(Texture2D, 0, Region, 4 * Region.Width, SrcData.GetData());
});
}
}
void UAzureKinectDevice::CaptureBodyIndexImage(const k4abt::frame& BodyFrame)
{
k4a::image BodyIndexMap = BodyFrame.get_body_index_map();
int32 Width = BodyIndexMap.get_width_pixels(), Height = BodyIndexMap.get_height_pixels();
if (Width == 0 || Height == 0) return;
if (BodyIndexTexture->GetSurfaceWidth() != Width || BodyIndexTexture->GetSurfaceHeight() != Height)
{
BodyIndexTexture->InitCustomFormat(Width, Height, EPixelFormat::PF_R8G8B8A8, true);
BodyIndexTexture->RenderTargetFormat = ETextureRenderTargetFormat::RTF_RGBA8;
BodyIndexTexture->UpdateResource();
}
else
{
uint8* S = BodyIndexMap.get_buffer();
TArray<uint8> SrcData;
SrcData.Reset(Width * Height * 4);
for (int i = 0; i < Width * Height; i++)
{
SrcData.Push(S[i]);
SrcData.Push(S[i]);
SrcData.Push(S[i]);
SrcData.Push(0xff);
}
FTextureResource* TextureResource = BodyIndexTexture->Resource;
auto Region = FUpdateTextureRegion2D(0, 0, 0, 0, Width, Height);
ENQUEUE_RENDER_COMMAND(UpdateTextureData)(
[TextureResource, Region, SrcData](FRHICommandListImmediate& RHICmdList) {
FTexture2DRHIRef Texture2D = TextureResource->TextureRHI ? TextureResource->TextureRHI->GetTexture2D() : nullptr;
if (!Texture2D)
{
return;
}
RHIUpdateTexture2D(Texture2D, 0, Region, 4 * Region.Width, SrcData.GetData());
});
}
}
void UAzureKinectDevice::UpdateSkeletons()
{
k4abt::frame BodyFrame = nullptr;
TArray<int32> BodyIDs;
try
{
if (!BodyTracker.enqueue_capture(Capture, FrameTime))
{
UE_LOG(AzureKinectDeviceLog, Warning, TEXT("Failed adding capture to tracker process queue"));
return;
}
if (!BodyTracker.pop_result(&BodyFrame, FrameTime))
{
UE_LOG(AzureKinectDeviceLog, Warning, TEXT("Failed Tracker pop body frame"));
return;
}
}
catch (const k4a::error& Err)
{
FString Msg(ANSI_TO_TCHAR(Err.what()));
UE_LOG(AzureKinectDeviceLog, Error, TEXT("Couldn't get Body Frame: %s"), *Msg);
}
if (BodyIndexTexture)
{
CaptureBodyIndexImage(BodyFrame);
}
{
FScopeLock Lock(Thread->GetCriticalSection());
NumTrackedSkeletons = BodyFrame.get_num_bodies();
Skeletons.Reset(NumTrackedSkeletons);
for (int32 i = 0; i < NumTrackedSkeletons; i++)
{
k4abt_body_t Body;
FAzureKinectSkeleton Skeleton;
BodyFrame.get_body_skeleton(i, Body.skeleton);
Skeleton.ID = BodyFrame.get_body_id(i);
Skeleton.Joints.Reset(K4ABT_JOINT_COUNT);
for (int32 j = 0; j < K4ABT_JOINT_COUNT; j++)
{
Skeleton.Joints.Push(JointToTransform(Body.skeleton.joints[j], j));
}
Skeletons.Push(Skeleton);
}
}
BodyFrame.reset();
}
FTransform UAzureKinectDevice::JointToTransform(const k4abt_joint_t& Joint, int32 Index)
{
// This transform algorithm is adapted from
// https://github.com/secretlocation/azure-kinect-unreal/
// There is still room to refactor...
/**
* Convert Azure Kinect Depth and Color camera co-ordinate system
* to Unreal co-ordinate system
* @see https://docs.microsoft.com/en-us/azure/kinect-dk/coordinate-systems
*
* Kinect [mm] Unreal [cm]
* --------------------------------------
* +ve X-axis Right +ve Y-axis
* +ve Y-axis Down -ve Z-axis
* +ve Z-axis Forward +ve X-axis
*/
FVector Position(Joint.position.xyz.z, Joint.position.xyz.x, - Joint.position.xyz.y);
Position *= 0.1f;
/**
* Convert the Orientation from Kinect co-ordinate system to Unreal co-ordinate system.
* We negate the x, y components of the JointQuaternion since we are converting from
* Kinect's Right Hand orientation to Unreal's Left Hand orientation.
*/
FQuat Quat(
-Joint.orientation.wxyz.x,
-Joint.orientation.wxyz.y,
Joint.orientation.wxyz.z,
Joint.orientation.wxyz.w
);
return FTransform(Quat, Position);
}
void UAzureKinectDevice::CalcFrameCount()
{
float FrameTimeInMilli = 0.0f;
switch (Fps)
{
case EKinectFps::PER_SECOND_5:
FrameTimeInMilli = 1000.f / 5.f;
break;
case EKinectFps::PER_SECOND_15:
FrameTimeInMilli = 1000.f / 15.f;
break;
case EKinectFps::PER_SECOND_30:
FrameTimeInMilli = 1000.f / 30.f;
break;
default:
break;
}
FrameTime = std::chrono::milliseconds(FMath::CeilToInt(FrameTimeInMilli));
}

View File

@ -0,0 +1,70 @@
#include "AzureKinectDeviceThread.h"
#include "HAL/PlatformProcess.h"
#include "AzureKinectDevice.h"
DEFINE_LOG_CATEGORY(AzureKinectThreadLog);
FAzureKinectDeviceThread::FAzureKinectDeviceThread(UAzureKinectDevice* Device) :
KinectDevice(Device),
Thread(nullptr),
StopTaskCounter(0)
{
Thread = FRunnableThread::Create(this, TEXT("FAzureKinectDeviceThread"), 0, TPri_BelowNormal); //windows default = 8mb for thread, could specify more
if (!Thread)
{
UE_LOG(AzureKinectThreadLog, Error, TEXT("Failed to create Azure Kinect thread."));
}
}
FAzureKinectDeviceThread::~FAzureKinectDeviceThread()
{
if (Thread)
{
delete Thread;
Thread = nullptr;
}
}
bool FAzureKinectDeviceThread::Init()
{
UE_LOG(AzureKinectThreadLog, Verbose, TEXT("Azure Kinect thread started."));
return true;
}
uint32 FAzureKinectDeviceThread::Run()
{
if (!KinectDevice)
{
UE_LOG(AzureKinectThreadLog, Error, TEXT("KinectDevice is null, could not run the thread"));
return 1;
}
while (StopTaskCounter.GetValue() == 0)
{
// Do the Kinect capture, enqueue, pop body frame stuff
KinectDevice->UpdateAsync();
}
return 0;
}
void FAzureKinectDeviceThread::Stop()
{
StopTaskCounter.Increment();
}
void FAzureKinectDeviceThread::EnsureCompletion()
{
Stop();
if (Thread)
{
Thread->WaitForCompletion();
}
}
FCriticalSection* FAzureKinectDeviceThread::GetCriticalSection()
{
return &CriticalSection;
}

View File

@ -0,0 +1,21 @@
// Copyright Epic Games, Inc. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Modules/ModuleManager.h"
#define LOCTEXT_NAMESPACE "FAzureKinectModule"
class FAzureKinectModule : public IModuleInterface
{
public:
/** IModuleInterface implementation */
virtual void StartupModule() override {};
virtual void ShutdownModule() override {};
};
#undef LOCTEXT_NAMESPACE
IMPLEMENT_MODULE(FAzureKinectModule, AzureKinect)

View File

@ -0,0 +1,37 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "BonePose.h"
#include "BoneControllers/AnimNode_SkeletalControlBase.h"
#include "AzureKinectDevice.h"
#include "AnimNode_AzureKinectPose.generated.h"
DECLARE_LOG_CATEGORY_EXTERN(AzureKinectAnimNodeLog, Log, All);
/**
*
*/
USTRUCT(BlueprintInternalUseOnly)
struct AZUREKINECT_API FAnimNode_AzureKinectPose : public FAnimNode_Base
{
GENERATED_BODY()
public:
FAnimNode_AzureKinectPose();
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category="Transform", meta = (PinShownByDefault))
FAzureKinectSkeleton Skeleton;
UPROPERTY(EditAnywhere, Category="Bone Mapping")
TMap<EKinectBodyJoint, FBoneReference> BonesToModify;
// FAnimNode_Base interface
virtual void Update_AnyThread(const FAnimationUpdateContext& Context) override;
virtual void EvaluateComponentSpace_AnyThread(FComponentSpacePoseContext& Output) override;
private:
TArray<FBoneTransform> BoneTransforms;
};

View File

@ -0,0 +1,148 @@
#pragma once
#include "CoreMinimal.h"
#include "Engine/TextureRenderTarget2D.h"
#include "Animation/SkeletalMeshActor.h"
#include "k4a/k4a.hpp"
#include "k4abt.hpp"
#include "AzureKinectEnum.h"
#include "AzureKinectDeviceThread.h"
#include "AzureKinectDevice.generated.h"
USTRUCT(BlueprintType)
struct FAzureKinectSkeleton
{
GENERATED_BODY()
UPROPERTY(BlueprintReadWrite)
int32 ID;
UPROPERTY(BlueprintReadWrite)
TArray<FTransform> Joints;
};
DECLARE_LOG_CATEGORY_EXTERN(AzureKinectDeviceLog, Log, All);
UCLASS(BlueprintType, hidecategories=(Object))
class AZUREKINECT_API UAzureKinectDevice : public UObject
{
GENERATED_BODY()
public:
UAzureKinectDevice();
UAzureKinectDevice(const FObjectInitializer& ObjectInitializer);
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "IO")
UTextureRenderTarget2D* DepthTexture;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "IO")
UTextureRenderTarget2D* ColorTexture;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "IO")
UTextureRenderTarget2D* InflaredTexture;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "IO")
UTextureRenderTarget2D* BodyIndexTexture;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
EKinectDepthMode DepthMode;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
EKinectColorResolution ColorMode;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
EKinectRemap RemapMode;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
EKinectFps Fps;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
EKinectSensorOrientation SensorOrientation = EKinectSensorOrientation::DEFAULT;
UPROPERTY(BlueprintReadWrite, Category = "Config")
int32 DeviceIndex;
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Config")
bool bSkeletonTracking;
UFUNCTION(BlueprintCallable, Category = "IO")
static int32 GetNumConnectedDevices();
UFUNCTION(BlueprintCallable, Category = "IO")
void LoadDevices();
/**
* Call "open" and "start_camara" to Native Kinect Device
* and return result. Then start a thread for Kinect's data feed.
* Device Index should be specified in advance;
*/
UFUNCTION(BlueprintCallable, Category = "IO")
bool StartDevice();
/**
* Call "stop_camara" and "close" to Native Kinect Device,
* and release all instaces about Native Kinect.
*/
UFUNCTION(BlueprintCallable, Category = "IO")
bool StopDevice();
/**
* Check if Kinect Device is open.
*/
UFUNCTION(BlueprintCallable, Category = "IO")
bool IsOpen() const { return bOpen; }
/**
* Return the number of skeletons currently acquired and stored.
*/
UFUNCTION(BlueprintCallable, Category = "Skeletons")
int32 GetNumTrackedSkeletons() const;
/**
* Return an array of the skeletons currently acquired and stored.
*/
UFUNCTION(BlueprintCallable, Category = "Skeletons")
const TArray<FAzureKinectSkeleton>& GetSkeletons() const;
/**
* Return a Skeleton struct by Index (not Skeleton ID).
* If the given Index is out of range, return an empty struct.
*/
UFUNCTION(BlueprintCallable, Category = "Skeletons")
FAzureKinectSkeleton GetSkeleton(int32 Index) const;
/**
* Update and process the raw feed from the Kinect device asynchronously.
* Should be called outside the main thread.
*/
void UpdateAsync();
TArray<TSharedPtr<FString>> DeviceList;
private:
bool bOpen;
void CaptureColorImage();
void CaptureDepthImage();
void CaptureInflaredImage();
void CaptureBodyIndexImage(const k4abt::frame& BodyFrame);
static FTransform JointToTransform(const k4abt_joint_t& Joint, int32 Index);
void UpdateSkeletons();
void CalcFrameCount();
k4a::device NativeDevice;
k4a::capture Capture;
std::chrono::milliseconds FrameTime;
k4a::image RemapImage;
k4a::calibration KinectCalibration;
k4a::transformation KinectTransformation;
k4abt::tracker BodyTracker;
FAzureKinectDeviceThread* Thread;
int32 NumTrackedSkeletons;
TArray<FAzureKinectSkeleton> Skeletons;
};

View File

@ -0,0 +1,40 @@
#pragma once
#include "CoreMinimal.h"
#include "HAL/Runnable.h"
#include "HAL/RunnableThread.h"
DECLARE_LOG_CATEGORY_EXTERN(AzureKinectThreadLog, Log, All);
class UAzureKinectDevice;
class FAzureKinectDeviceThread : public FRunnable
{
public:
FAzureKinectDeviceThread(UAzureKinectDevice* Device);
virtual ~FAzureKinectDeviceThread();
virtual bool Init();
virtual uint32 Run();
virtual void Stop();
/** Stops the thread and waits for its completion. */
void EnsureCompletion();
FCriticalSection* GetCriticalSection();
private:
/** Thread handle. Control the thread using this, with operations like Kill and Suspend. */
FRunnableThread* Thread;
/** Stop this thread? Uses Thread Safe Counter */
FThreadSafeCounter StopTaskCounter;
/** The device that starts this thread. */
UAzureKinectDevice* KinectDevice;
/** To be used with FScopeLock */
FCriticalSection CriticalSection;
};

View File

@ -0,0 +1,99 @@
#pragma once
#include "AzureKinectEnum.generated.h"
/**
* Blueprintable enum defined based on k4a_depth_mode_t from k4atypes.h
*
* @note This should always have the same enum values as k4a_depth_mode_t
*/
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectDepthMode : uint8
{
OFF = 0 UMETA(DisplayName = "Depth Mode Off"), /**< Depth sensor will be turned off with this setting. */
NFOV_2X2BINNED UMETA(DisplayName = "NFOV 2x2 Binned (320x288)"), /**< Depth captured at 320x288. Passive IR is also captured at 320x288. */
NFOV_UNBINNED UMETA(DisplayName = "NFOV Unbinned (640x576)"), /**< Depth captured at 640x576. Passive IR is also captured at 640x576. */
WFOV_2X2BINNED UMETA(DisplayName = "WFOV 2x2 Binned (512x512)"), /**< Depth captured at 512x512. Passive IR is also captured at 512x512. */
WFOV_UNBINNED UMETA(DisplayName = "WFOV Unbinned (1024x1024)"), /**< Depth captured at 1024x1024. Passive IR is also captured at 1024x1024. */
PASSIVE_IR UMETA(DisplayName = "Passive IR (1024x1024)"), /**< Passive IR only, captured at 1024x1024. */
};
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectColorResolution : uint8
{
RESOLUTION_OFF = 0 UMETA(DisplayName = "Color Camera Turned Off"), /**< Color sensor will be turned off with this setting. */
RESOLUTION_720P UMETA(DisplayName = "1280 x 720 [16:9]"), /**< Color captured at 1280 x 720. */
RESOLUTION_1440P UMETA(DisplayName = "2560 x 1440 [16:9]"), /**< Color captured at 2560 x 1440. */
RESOLUTION_1536P UMETA(DisplayName = "2048 x 1536 [4:3]"), /**< Color captured at 2048 x 1536. */
RESOLUTION_2160P UMETA(DisplayName = "3840 x 2160 [16:9]"), /**< Color captured at 3840 x 2160. */
RESOLUTION_3072P UMETA(DisplayName = "4096 x 3072 [4:3]"), /**< Color captured at 4096 x 3072. */
};
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectFps : uint8
{
PER_SECOND_5 = 0 UMETA(DisplayName = "5 fps"),
PER_SECOND_15 UMETA(DisplayName = "15 fps"),
PER_SECOND_30 UMETA(DisplayName = "30 fps"),
};
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectRemap : uint8
{
COLOR_TO_DEPTH = 0 UMETA(DisplayName = "Color to Depth"),
DEPTH_TO_COLOR UMETA(DisplayName = "Depth to Color"),
};
/**
* Blueprintable enum defined based on k4abt_joint_id_t from k4abttypes.h
* This should always have the same enum values as k4abt_joint_id_t
*/
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectBodyJoint : uint8
{
PELVIS = 0 UMETA(DisplayName = "Pelvis"),
SPINE_NAVEL UMETA(DisplayName = "Spine Navel"),
SPINE_CHEST UMETA(DisplayName = "Spine Chest"),
NECK UMETA(DisplayName = "Neck"),
CLAVICLE_LEFT UMETA(DisplayName = "Clavicle Left"),
SHOULDER_LEFT UMETA(DisplayName = "Shoulder Left"),
ELBOW_LEFT UMETA(DisplayName = "Elbow Left"),
WRIST_LEFT UMETA(DisplayName = "Wrist Left"),
HAND_LEFT UMETA(DisplayName = "Hand Left"),
HANDTIP_LEFT UMETA(DisplayName = "Hand Tip Left"),
THUMB_LEFT UMETA(DisplayName = "Thumb Left"),
CLAVICLE_RIGHT UMETA(DisplayName = "Clavicle Right"),
SHOULDER_RIGHT UMETA(DisplayName = "Shoulder Right"),
ELBOW_RIGHT UMETA(DisplayName = "Elbow Right"),
WRIST_RIGHT UMETA(DisplayName = "Wrist Right"),
HAND_RIGHT UMETA(DisplayName = "Hand Right"),
HANDTIP_RIGHT UMETA(DisplayName = "Hand Tip Right"),
THUMB_RIGHT UMETA(DisplayName = "Thumb Right"),
HIP_LEFT UMETA(DisplayName = "Hip Left"),
KNEE_LEFT UMETA(DisplayName = "Knee Left"),
ANKLE_LEFT UMETA(DisplayName = "Ankle Left"),
FOOT_LEFT UMETA(DisplayName = "Foot Left"),
HIP_RIGHT UMETA(DisplayName = "Hip Right"),
KNEE_RIGHT UMETA(DisplayName = "Knee Right"),
ANKLE_RIGHT UMETA(DisplayName = "Ankle Right"),
FOOT_RIGHT UMETA(DisplayName = "Foot Right"),
HEAD UMETA(DisplayName = "Head"),
NOSE UMETA(DisplayName = "Nose"),
EYE_LEFT UMETA(DisplayName = "Eye Left"),
EAR_LEFT UMETA(DisplayName = "Ear Left"),
EYE_RIGHT UMETA(DisplayName = "Eye Right"),
EAR_RIGHT UMETA(DisplayName = "Ear Right"),
COUNT UMETA(DisplayName = "COUNT", Hidden),
};
/**
* This should always have the same enum values as k4abt_sensor_orientation_t
*/
UENUM(BlueprintType, Category = "Azure Kinect|Enums")
enum class EKinectSensorOrientation : uint8
{
DEFAULT = 0 UMETA(DisplayName = "Default"), /**< Mount the sensor at its default orientation */
CLOCKWISE90 UMETA(DisplayName = "Clockwise 90"), /**< Rotate the sensor 90 degrees clockwise */
COUNTERCLOCKWISE90 UMETA(DisplayName = "Counter-clockwise 90"), /**< Rotate the sensor 90 degrees counter-clockwise */
FLIP180 UMETA(DisplayName = "Flip 180"), /**< Mount the sensor upside-down */
};

View File

@ -0,0 +1,40 @@
// Copyright Epic Games, Inc. All Rights Reserved.
using System.IO;
using UnrealBuildTool;
public class AzureKinectEditor : ModuleRules
{
public AzureKinectEditor(ReadOnlyTargetRules Target) : base(Target)
{
//OverridePackageType = PackageOverrideType.GameUncookedOnly;
PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs;
PrivateIncludePaths.AddRange(
new string[]
{
"AzureKinectEditor/Private",
"AzureKinectEditor/Private/AnimNodes",
"AzureKinectEditor/Private/AssetTools",
"AzureKinectEditor/Private/Customizations",
"AzureKinectEditor/Private/Factories",
});
PrivateDependencyModuleNames.AddRange(
new string[]
{
"Core",
"CoreUObject",
"Engine",
"Slate",
"SlateCore",
"PropertyEditor",
"InputCore",
"AzureKinect",
"UnrealEd",
"BlueprintGraph",
"AnimGraph",
});
}
}

View File

@ -0,0 +1,38 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AnimNodes/AnimGraphNode_AzureKinectPose.h"
#include "AnimationGraphSchema.h"
#define LOCTEXT_NAMESPACE "AzureKinectPose"
FText UAnimGraphNode_AzureKinectPose::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
return LOCTEXT("AzureKinectPose", "Azure Kinect Pose");
}
FString UAnimGraphNode_AzureKinectPose::GetNodeCategory() const
{
return FString("Azure Kinect");
}
FText UAnimGraphNode_AzureKinectPose::GetTooltipText() const
{
return LOCTEXT(
"AnimGraphNode_AzureKinectPose_Tooltip",
"Process AzureKinect skeleton input into pose"
);
}
FLinearColor UAnimGraphNode_AzureKinectPose::GetNodeTitleColor() const
{
return FLinearColor(0.f, 0.f, 0.f);
}
void UAnimGraphNode_AzureKinectPose::CreateOutputPins()
{
CreatePin(EGPD_Output, UAnimationGraphSchema::PC_Struct, FComponentSpacePoseLink::StaticStruct(), TEXT("ComponentPose"));
}
#undef LOCTEXT_NAMESPACE

View File

@ -0,0 +1,31 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "AnimGraphNode_Base.h"
#include "AnimNode_AzureKinectPose.h"
#include "EdGraph/EdGraphNodeUtils.h"
#include "AnimNodeEditModes.h"
#include "AnimGraphNode_AzureKinectPose.generated.h"
/**
*
*/
UCLASS(meta = (Keywords="Azure Kinect"))
class UAnimGraphNode_AzureKinectPose : public UAnimGraphNode_Base
{
GENERATED_BODY()
public:
UPROPERTY(EditAnywhere, Category = "Settings")
FAnimNode_AzureKinectPose Node;
virtual FText GetNodeTitle(ENodeTitleType::Type TitleType) const override;
virtual FString GetNodeCategory() const override;
virtual FText GetTooltipText() const override;
virtual FLinearColor GetNodeTitleColor() const override;
virtual void CreateOutputPins() override;
};

View File

@ -0,0 +1,25 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AzureKinectDeviceActions.h"
#include "AzureKinectDevice.h"
FText FAzureKinectDeviceActions::GetName() const
{
return NSLOCTEXT("AzureKinectDeviceActions", "AssetTypeActions_AzureKinectDevice", "Azure Kinect Device");
}
FColor FAzureKinectDeviceActions::GetTypeColor() const
{
return FColor::White;
}
UClass* FAzureKinectDeviceActions::GetSupportedClass() const
{
return UAzureKinectDevice::StaticClass();
}
uint32 FAzureKinectDeviceActions::GetCategories()
{
return EAssetTypeCategories::Media;
}

View File

@ -0,0 +1,21 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "AssetTypeActions_Base.h"
class FAzureKinectDeviceActions : public FAssetTypeActions_Base
{
public:
virtual FText GetName() const override;
virtual FColor GetTypeColor() const override;
virtual UClass* GetSupportedClass() const override;
virtual uint32 GetCategories() override;
//virtual void OpenAssetEditor(const TArray<UObject*>& InObjects, TSharedPtr<class IToolkitHost> EditWithinLevelEditor = TSharedPtr<IToolkitHost>()) override;
//virtual void GetActions(const TArray<UObject*>&InObjects, FMenuBuilder & MenuBuilder) override;
//virtual bool HasActions(const TArray<UObject*>& InObjects) const override;
private:
};

View File

@ -0,0 +1,98 @@
// Copyright Epic Games, Inc. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Modules/ModuleManager.h"
#include "Modules/ModuleInterface.h"
#include "AzureKinectDevice.h"
#include "AzureKinectDeviceActions.h"
#include "AzureKinectDeviceCustomization.h"
#define LOCTEXT_NAMESPACE "FAzureKinectEditorModule"
class FAzureKinectEditorModule : public IModuleInterface
{
public:
/** IModuleInterface implementation */
virtual void StartupModule() override
{
AzureKinectDeviceName = UAzureKinectDevice::StaticClass()->GetFName();
RegisterAssetTools();
RegisterCustomizations();
}
virtual void ShutdownModule() override
{
UnregisterAssetTools();
UnregisterCustomizations();
}
protected:
/** Registers asset tool actions. */
void RegisterAssetTools()
{
IAssetTools& AssetTools = FModuleManager::LoadModuleChecked<FAssetToolsModule>("AssetTools").Get();
RegisterAssetTypeAction(AssetTools, MakeShareable(new FAzureKinectDeviceActions()));
}
/** Unregisters asset tool actions. */
void UnregisterAssetTools()
{
FAssetToolsModule* AssetToolsModule = FModuleManager::GetModulePtr<FAssetToolsModule>("AssetTools");
if (AssetToolsModule)
{
IAssetTools& AssetTools = AssetToolsModule->Get();
for (auto Action : RegisteredAssetTypeActions)
{
AssetTools.UnregisterAssetTypeActions(Action);
}
}
}
/**
* Registers a single asset type action.
*
* @param AssetTools The asset tools object to register with.
* @param Action The asset type action to register.
*/
void RegisterAssetTypeAction(IAssetTools& AssetTools, TSharedRef<IAssetTypeActions> Action)
{
AssetTools.RegisterAssetTypeActions(Action);
RegisteredAssetTypeActions.Add(Action);
}
/** Register details view customizations. */
void RegisterCustomizations()
{
FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>("PropertyEditor");
PropertyModule.RegisterCustomClassLayout(AzureKinectDeviceName, FOnGetDetailCustomizationInstance::CreateStatic(&FAzureKinectDeviceCustomization::MakeInstance));
}
void UnregisterCustomizations()
{
FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>("PropertyEditor");
PropertyModule.UnregisterCustomClassLayout(AzureKinectDeviceName);
}
private:
/** The collection of registered asset type actions. */
TArray<TSharedRef<IAssetTypeActions>> RegisteredAssetTypeActions;
FName AzureKinectDeviceName;
};
#undef LOCTEXT_NAMESPACE
IMPLEMENT_MODULE(FAzureKinectEditorModule, AzureKinectEditor)

View File

@ -0,0 +1,173 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AzureKinectDeviceCustomization.h"
#include "DetailCategoryBuilder.h"
#include "DetailLayoutBuilder.h"
#include "DetailWidgetRow.h"
#include "Widgets/Input/SComboBox.h"
#include "PropertyCustomizationHelpers.h"
#define LOCTEXT_NAMESPACE "AzureKinectDeviceCustomization"
TSharedRef<IDetailCustomization> FAzureKinectDeviceCustomization::MakeInstance()
{
return MakeShareable(new FAzureKinectDeviceCustomization());
}
void FAzureKinectDeviceCustomization::CustomizeDetails(IDetailLayoutBuilder& DetailBuilder)
{
// Retrieve target object
TArray<TWeakObjectPtr<UObject>> Objects;
DetailBuilder.GetObjectsBeingCustomized(Objects);
if (Objects.Num() == 1)
{
AzureKinectDevice = Cast<UAzureKinectDevice>(Objects[0].Get());
}
else
{
return;
}
// Customize 'Config' category
IDetailCategoryBuilder& ConfigCategory = DetailBuilder.EditCategory("Config");
TAttribute<bool> CheckDeviceOpen = TAttribute<bool>::Create(TAttribute<bool>::FGetter::CreateLambda(
[this]() {
return !AzureKinectDevice->IsOpen();
}));
{
// Add Custom Row of Device selection
CurrentOption = AzureKinectDevice->DeviceList[0];
ConfigCategory.AddCustomRow(LOCTEXT("DeviceSelectionFilterString", "Device Selection"))
.NameContent()
[
SNew(STextBlock)
.Text(LOCTEXT("DeviceSelectionLabel", "Device Selection"))
]
.ValueContent()
[
SNew(SComboBox<TSharedPtr<FString>>)
.IsEnabled(CheckDeviceOpen)
.OptionsSource(&(AzureKinectDevice->DeviceList))
.OnSelectionChanged_Raw(this, &FAzureKinectDeviceCustomization::OnSelectionChanged)
.OnGenerateWidget_Raw(this, &FAzureKinectDeviceCustomization::MakeWidgetForOption)
.InitiallySelectedItem(CurrentOption)
[
SNew(STextBlock)
.Text(this, &FAzureKinectDeviceCustomization::GetCurrentItemLabel)
]
];
}
// Alternative to the UProperty specifier meta=(EditCondition="bOpen"):
// we don't want to make "bOpen" an editable UProperty.
// Below is a workaround for making properties conditional without an editable condition boolean.
auto DepthMode = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, DepthMode));
auto ColorMode = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, ColorMode));
auto Fps = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, Fps));
auto SensorOrientation = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, SensorOrientation));
auto RemapMode = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, RemapMode));
auto SkeletonTracking = DetailBuilder.GetProperty(GET_MEMBER_NAME_CHECKED(UAzureKinectDevice, bSkeletonTracking));
ConfigCategory.AddProperty(DepthMode).IsEnabled(CheckDeviceOpen);
ConfigCategory.AddProperty(ColorMode).IsEnabled(CheckDeviceOpen);
ConfigCategory.AddProperty(Fps).IsEnabled(CheckDeviceOpen);
ConfigCategory.AddProperty(SensorOrientation).IsEnabled(CheckDeviceOpen);
ConfigCategory.AddProperty(RemapMode).IsEnabled(CheckDeviceOpen);
ConfigCategory.AddProperty(SkeletonTracking).IsEnabled(CheckDeviceOpen);
// Customize 'IO' category
IDetailCategoryBuilder& IOCategory = DetailBuilder.EditCategory("IO");
// Add Custom Row of Execution buttons
IOCategory.AddCustomRow(LOCTEXT("ButtonFilterString", "Function Buttons"))
.NameContent()
[
SNew(STextBlock)
.Text(LOCTEXT("ExecutionLabel", "Execution"))
]
.ValueContent()
[
SNew(SHorizontalBox)
+ SHorizontalBox::Slot()
.Padding(FMargin(0.f, 2.f, 10.f, 2.f))
.AutoWidth()
[
SNew(SButton)
.Text(LOCTEXT("LoadButtonText", "LoadDevice"))
.Visibility_Lambda([this]() {
return AzureKinectDevice->IsOpen() ? EVisibility::Collapsed : EVisibility::Visible;
})
.OnClicked_Lambda([this]() {
AzureKinectDevice->LoadDevices();
return FReply::Handled();
})
]
+ SHorizontalBox::Slot()
.Padding(FMargin(0.f, 2.f, 10.0f, 2.f))
.AutoWidth()
[
SNew(SButton)
.Text(LOCTEXT("StartButtonText", "StartDevice"))
.Visibility_Lambda([this]() {
return AzureKinectDevice->IsOpen() ? EVisibility::Collapsed : EVisibility::Visible;
})
.OnClicked_Lambda([this]() {
AzureKinectDevice->StartDevice();
return FReply::Handled();
})
]
+ SHorizontalBox::Slot()
.Padding(FMargin(0.f, 0, 10.f, 2.f))
.AutoWidth()
[
SNew(SButton)
.Text(LOCTEXT("StopButtonText", "StopDevice"))
.Visibility_Lambda([this]() {
return AzureKinectDevice->IsOpen() ? EVisibility::Visible : EVisibility::Collapsed;
})
.OnClicked_Lambda([this]() {
AzureKinectDevice->StopDevice();
return FReply::Handled();
})
]
];
}
TSharedRef<SWidget> FAzureKinectDeviceCustomization::MakeWidgetForOption(TSharedPtr<FString> InOption)
{
return SNew(STextBlock).Text(FText::FromString(*InOption));
}
void FAzureKinectDeviceCustomization::OnSelectionChanged(TSharedPtr<FString> NewValue, ESelectInfo::Type)
{
CurrentOption = NewValue;
// Also update UAzureKinectDevice's current index
int32 IndexOfFound = AzureKinectDevice->DeviceList.Find(NewValue);
if (IndexOfFound == INDEX_NONE)
{
AzureKinectDevice->DeviceIndex = - 1;
}
else
{
AzureKinectDevice->DeviceIndex = IndexOfFound - 1;
}
}
FText FAzureKinectDeviceCustomization::GetCurrentItemLabel() const
{
if (CurrentOption.IsValid())
{
return FText::FromString(*CurrentOption);
}
return LOCTEXT("InvalidComboEntryText", "No Device");
}
#undef LOCTEXT_NAMESPACE

View File

@ -0,0 +1,26 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "IDetailCustomization.h"
#include "AzureKinectDevice.h"
/**
*
*/
class FAzureKinectDeviceCustomization : public IDetailCustomization
{
public:
static TSharedRef<IDetailCustomization> MakeInstance();
virtual void CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) override;
TSharedRef<SWidget> MakeWidgetForOption(TSharedPtr<FString> InOption);
void OnSelectionChanged(TSharedPtr<FString> NewValue, ESelectInfo::Type);
FText GetCurrentItemLabel() const;
private:
TWeakObjectPtr<UAzureKinectDevice> AzureKinectDevice;
TSharedPtr<FString> CurrentOption;
};

View File

@ -0,0 +1,28 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "AzureKinectDeviceFactory.h"
#include "AzureKinectDevice.h"
#include "AssetTypeCategories.h"
UAzureKinectDeviceFactory::UAzureKinectDeviceFactory(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
bCreateNew = true;
bEditAfterNew = true;
SupportedClass = UAzureKinectDevice::StaticClass();
}
UObject* UAzureKinectDeviceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
return NewObject<UAzureKinectDevice>(InParent, InClass, InName, Flags);
}
bool UAzureKinectDeviceFactory::ShouldShowInNewMenu() const
{
return true;
}
uint32 UAzureKinectDeviceFactory::GetMenuCategories() const
{
return EAssetTypeCategories::Misc;
}

View File

@ -0,0 +1,23 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "Factories/Factory.h"
#include "UObject/ObjectMacros.h"
#include "AzureKinectDeviceFactory.generated.h"
/**
* Factory class of UAzureKinectDevice.
* UAzureKinectDevice can be Asset Type due to this factory class.
*/
UCLASS(hidecategories=Object)
class UAzureKinectDeviceFactory : public UFactory
{
GENERATED_UCLASS_BODY()
public:
virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
virtual bool ShouldShowInNewMenu() const override;
virtual uint32 GetMenuCategories() const override;
};