diff --git a/ogre2/src/Ogre2DepthCamera.cc b/ogre2/src/Ogre2DepthCamera.cc
index 1ffda3c6d..f3a9bcb38 100644
--- a/ogre2/src/Ogre2DepthCamera.cc
+++ b/ogre2/src/Ogre2DepthCamera.cc
@@ -909,7 +909,7 @@ void Ogre2DepthCamera::CreateDepthTexture()
         this->ImageWidth(), this->ImageHeight());
     this->dataPtr->ogreDepthTexture[i]->setNumMipmaps(1u);
     this->dataPtr->ogreDepthTexture[i]->setPixelFormat(
-        Ogre::PFG_RGBA32_FLOAT);
+        Ogre::PFG_RGBA32_UINT);
 
     this->dataPtr->ogreDepthTexture[i]->scheduleTransitionTo(
         Ogre::GpuResidency::Resident);
diff --git a/ogre2/src/media/Hlms/Terra/Metal/VertexShader_vs.metal b/ogre2/src/media/Hlms/Terra/Metal/VertexShader_vs.metal
index 32af1e07c..302272d11 100644
--- a/ogre2/src/media/Hlms/Terra/Metal/VertexShader_vs.metal
+++ b/ogre2/src/media/Hlms/Terra/Metal/VertexShader_vs.metal
@@ -17,7 +17,7 @@ struct PS_INPUT
   @insertpiece( Terra_VStoPS_block )
 
   float4 gl_Position [[position]];
   @foreach( hlms_pso_clip_distances, n )
-  float gl_ClipDistance@n [[clip_distance]];
+  float gl_ClipDistance [[clip_distance]] [@value( hlms_pso_clip_distances )];
   @end
 };
diff --git a/ogre2/src/media/materials/programs/GLSL/depth_camera_final_fs.glsl b/ogre2/src/media/materials/programs/GLSL/depth_camera_final_fs.glsl
index b1d7e7a0a..7b37225a4 100644
--- a/ogre2/src/media/materials/programs/GLSL/depth_camera_final_fs.glsl
+++ b/ogre2/src/media/materials/programs/GLSL/depth_camera_final_fs.glsl
@@ -23,11 +23,10 @@ in block
   vec2 uv0;
 } inPs;
 
-vulkan_layout( ogre_t0 ) uniform texture2D inputTexture;
-vulkan( layout( ogre_s0 ) uniform sampler texSampler );
+vulkan_layout( ogre_t0 ) uniform utexture2D inputTexture;
 
 vulkan_layout( location = 0 )
-out vec4 fragColor;
+out uvec4 fragColor;
 
 vulkan( layout( ogre_P0 ) uniform Params { )
   uniform float near;
@@ -42,14 +41,14 @@ void main()
 {
   float tolerance = 1e-6;
 
-  // Note: We use texelFetch because p.a contains an uint32 and sampling
+  // Note: We use PFG_RGBA32_UINT because p.a contains an uint32 and sampling
   // (even w/ point filtering) causes p.a to loss information (e.g.
   // values close to 0 get rounded to 0)
   //
   // See https://github.com/gazebosim/gz-rendering/issues/332
-  vec4 p = texelFetch(vkSampler2D(inputTexture,texSampler), ivec2(inPs.uv0 *texResolution.xy), 0);
+  uvec4 p = texelFetch(inputTexture, ivec2(inPs.uv0 *texResolution.xy), 0);
 
-  vec3 point = p.xyz;
+  vec3 point = uintBitsToFloat(p.xyz);
 
   // Clamp again in case render passes changed depth values
   // to be outside of min/max range
@@ -78,5 +77,5 @@ void main()
     }
   }
 
-  fragColor = vec4(point, p.a);
+  fragColor = uvec4(floatBitsToUint(point), p.a);
 }
diff --git a/ogre2/src/media/materials/programs/GLSL/depth_camera_fs.glsl b/ogre2/src/media/materials/programs/GLSL/depth_camera_fs.glsl
index 1d21b34b4..f2f925073 100644
--- a/ogre2/src/media/materials/programs/GLSL/depth_camera_fs.glsl
+++ b/ogre2/src/media/materials/programs/GLSL/depth_camera_fs.glsl
@@ -32,7 +32,7 @@ vulkan_layout( ogre_t3 ) uniform texture2D particleDepthTexture;
 vulkan( layout( ogre_s0 ) uniform sampler texSampler );
 
 vulkan_layout( location = 0 )
-out vec4 fragColor;
+out uvec4 fragColor;
 
 vulkan( layout( ogre_P0 ) uniform Params { )
   uniform vec2 projectionParams;
@@ -49,13 +49,13 @@ vulkan( layout( ogre_P0 ) uniform Params { )
   uniform float rnd;
 vulkan( }; )
 
-float packFloat(vec4 color)
+uint packUnorm4x8Gz(vec4 color)
 {
-  int rgba = (int(round(color.x * 255.0)) << 24) +
-             (int(round(color.y * 255.0)) << 16) +
-             (int(round(color.z * 255.0)) << 8) +
-             int(round(color.w * 255.0));
-  return intBitsToFloat(rgba);
+  uint rgba = (uint(round(color.x * 255.0)) << 24u) +
+              (uint(round(color.y * 255.0)) << 16u) +
+              (uint(round(color.z * 255.0)) << 8u) +
+              uint(round(color.w * 255.0));
+  return rgba;
 }
 
 float toSRGB( float x )
@@ -199,6 +199,5 @@ void main()
   // gamma correct
   color = toSRGB(color);
 
-  float rgba = packFloat(color);
-  fragColor = vec4(point, rgba);
+  fragColor = uvec4(floatBitsToUint(point), packUnorm4x8Gz(color));
 }
diff --git a/ogre2/src/media/materials/programs/Metal/depth_camera_final_fs.metal b/ogre2/src/media/materials/programs/Metal/depth_camera_final_fs.metal
index 605f43fe7..605b7a7a4 100644
--- a/ogre2/src/media/materials/programs/Metal/depth_camera_final_fs.metal
+++ b/ogre2/src/media/materials/programs/Metal/depth_camera_final_fs.metal
@@ -34,27 +34,24 @@ struct Params
   float4 texResolution;
 };
 
-fragment float4 main_metal
+fragment uint4 main_metal
 (
   PS_INPUT inPs [[stage_in]],
-  texture2d<float> inputTexture [[texture(0)]],
-  sampler inputSampler [[sampler(0)]],
+  texture2d<uint> inputTexture [[texture(0)]],
   constant Params &params [[buffer(PARAMETER_SLOT)]]
 )
 {
   float tolerance = 1e-6;
 
-  // Note: We use texelFetch because p.a contains an uint32 and sampling
+  // Note: We use PFG_RGBA32_UINT because p.a contains an uint32 and sampling
   // (even w/ point filtering) causes p.a to loss information (e.g.
   // values close to 0 get rounded to 0)
   //
   // See https://github.com/gazebosim/gz-rendering/issues/332
   // Either: Metal equivalent of texelFetch
-  float4 p = inputTexture.read(uint2(inPs.uv0 * params.texResolution.xy), 0);
-  // Or: Use standard sampler
-  // float4 p = inputTexture.sample(inputSampler, inPs.uv0);
+  uint4 p = inputTexture.read(uint2(inPs.uv0 * params.texResolution.xy), 0);
 
-  float3 point = p.xyz;
+  float3 point = as_type<float3>(p.xyz);
 
   // Clamp again in case render passes changed depth values
   // to be outside of min/max range
@@ -83,6 +80,6 @@ fragment float4 main_metal
     }
   }
 
-  float4 fragColor(point, p.a);
+  uint4 fragColor(as_type<uint3>(point), p.a);
   return fragColor;
 }
diff --git a/ogre2/src/media/materials/programs/Metal/depth_camera_fs.metal b/ogre2/src/media/materials/programs/Metal/depth_camera_fs.metal
index 8ad5a0459..e4ef4cddd 100644
--- a/ogre2/src/media/materials/programs/Metal/depth_camera_fs.metal
+++ b/ogre2/src/media/materials/programs/Metal/depth_camera_fs.metal
@@ -42,13 +42,13 @@ struct Params
   float rnd;
 };
 
-float packFloat(float4 color)
+uint packUnorm4x8Gz(float4 color)
 {
-  int rgba = (int(round(color.x * 255.0)) << 24) +
-             (int(round(color.y * 255.0)) << 16) +
-             (int(round(color.z * 255.0)) << 8) +
-             int(round(color.w * 255.0));
-  return as_type<float>(rgba);
+  uint rgba = (uint(round(color.x * 255.0)) << 24u) +
+              (uint(round(color.y * 255.0)) << 16u) +
+              (uint(round(color.z * 255.0)) << 8u) +
+              uint(round(color.w * 255.0));
+  return rgba;
 }
 
 inline float toSRGB( float x )
@@ -89,7 +89,7 @@ float4 gaussrand(float2 co, float3 offsets, float mean, float stddev)
   return float4(Z, Z, Z, 0.0);
 }
 
-fragment float4 main_metal
+fragment uint4 main_metal
 (
   PS_INPUT inPs [[stage_in]],
   texture2d<float> depthTexture [[texture(0)]],
@@ -204,7 +204,6 @@ fragment float4 main_metal
   // gamma correct
   color = toSRGB(color);
 
-  float rgba = packFloat(color);
-  float4 fragColor(point, rgba);
+  uint4 fragColor(as_type<uint3>(point), packUnorm4x8Gz(color));
   return fragColor;
 }
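Editor's note (illustrative sketch, not part of the patch): every shader change above follows the same pattern — the depth/point-cloud target is now PFG_RGBA32_UINT, so the XYZ floats are carried as raw bit patterns (floatBitsToUint()/uintBitsToFloat() in GLSL, as_type<>() in Metal) and the packed color stays a plain uint instead of being bit-cast into a float that sampling could corrupt (issue #332). A minimal CPU-side C++ sketch of the same bit-exact round trip, with illustrative names:

#include <cassert>
#include <cstdint>
#include <cstring>

// Pack an 8-bit-per-channel RGBA color the same way packUnorm4x8Gz() does.
static uint32_t PackRgba(uint8_t _r, uint8_t _g, uint8_t _b, uint8_t _a)
{
  return (uint32_t(_r) << 24) | (uint32_t(_g) << 16) | (uint32_t(_b) << 8) | _a;
}

int main()
{
  // A packed color whose bit pattern, read as a float, is a tiny denormal.
  // Reinterpreting the bits (memcpy, like intBitsToFloat/floatBitsToUint) is
  // lossless; letting the GPU treat the value as a filterable float is not,
  // which is what issue #332 was about with the old RGBA32_FLOAT target.
  const uint32_t rgba = PackRgba(0u, 0u, 7u, 255u);
  float asFloat;
  std::memcpy(&asFloat, &rgba, sizeof asFloat);       // ~ intBitsToFloat()

  uint32_t roundTrip;
  std::memcpy(&roundTrip, &asFloat, sizeof roundTrip);  // ~ floatBitsToUint()
  assert(roundTrip == rgba);  // bit-exact as long as nothing filters/blends it
  return 0;
}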
diff --git a/test/integration/CMakeLists.txt b/test/integration/CMakeLists.txt
index 6a5617376..96896dd72 100644
--- a/test/integration/CMakeLists.txt
+++ b/test/integration/CMakeLists.txt
@@ -5,6 +5,7 @@ set(tests
   camera
   depth_camera
   gpu_rays
+  heightmap
   lidar_visual
   render_pass
   scene
diff --git a/test/integration/base64.hh b/test/integration/base64.hh
new file mode 100644
index 000000000..f6c44ab74
--- /dev/null
+++ b/test/integration/base64.hh
@@ -0,0 +1,23 @@
+//
+// base64 encoding and decoding with C++.
+// Version: 1.01.00
+//
+// Copyright (C) 2004-2017 René Nyffenegger
+//
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+/// \brief Takes a binary input and outputs a string in base64
+/// \param[in] _bytesToEncode binary data to encode
+/// \param[in] _len length in bytes of _bytesToEncode
+/// \param[out] _outBase64 String with base64-encoded of input
+void Base64Encode(const void *_bytesToEncode, size_t _len,
+                  std::string &_outBase64);
+
+/// \brief Takes a base64-encoded string and turns it back into binary
+/// \param _s base64-encoded string
+/// \return Decoded binary data
+std::vector<uint8_t> Base64Decode(const std::string &_s);
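Editor's note (usage sketch, not part of the patch): the two helpers declared in base64.hh are used by the new heightmap test to print captured images as a single base64 line when an expectation fails. Assuming base64.inl is included in the same translation unit, a round trip looks like this:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

#include "base64.inl"  // brings in Base64Encode() / Base64Decode()

int main()
{
  const uint32_t header[3] = {100u, 100u, 7u};  // e.g. width, height, format

  std::string encoded;
  Base64Encode(header, sizeof(header), encoded);  // binary -> printable text
  std::cout << encoded << std::endl;

  // Decoding returns exactly the bytes that went in.
  const std::vector<uint8_t> decoded = Base64Decode(encoded);
  assert(decoded.size() == sizeof(header));
  assert(std::memcmp(decoded.data(), header, sizeof(header)) == 0);
  return 0;
}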
diff --git a/test/integration/base64.inl b/test/integration/base64.inl
new file mode 100644
index 000000000..e32293a91
--- /dev/null
+++ b/test/integration/base64.inl
@@ -0,0 +1,156 @@
+/*
+base64.cpp and base64.h
+
+base64 encoding and decoding with C++.
+
+Version: 1.01.00
+
+Copyright (C) 2004-2017 René Nyffenegger
+
+This source code is provided 'as-is', without any express or implied
+warranty. In no event will the author be held liable for any damages
+arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose,
+including commercial applications, and to alter it and redistribute it
+freely, subject to the following restrictions:
+
+1. The origin of this source code must not be misrepresented; you must not
+claim that you wrote the original source code. If you use this source code
+in a product, an acknowledgment in the product documentation would be
+appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be
+misrepresented as being the original source code.
+
+3. This notice may not be removed or altered from any source distribution.
+
+René Nyffenegger rene.nyffenegger@adp-gmbh.ch
+
+*/
+
+#include "base64.hh"
+
+#include <cctype>
+
+static const std::string kBase64Chars =  // NOLINT
+  "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+  "abcdefghijklmnopqrstuvwxyz"
+  "0123456789+/";
+
+static inline bool is_base64(char c)
+{
+  return (isalnum(c) || (c == '+') || (c == '/'));
+}
+
+void Base64Encode(const void *_bytesToEncode, size_t _len,
+                  std::string &_outBase64)
+{
+  uint8_t const *bytesToEncode =
+    reinterpret_cast<uint8_t const *>(_bytesToEncode);
+
+  int i = 0;
+  unsigned char char_array_3[3];
+  unsigned char char_array_4[4];
+
+  while (_len--)
+  {
+    char_array_3[i++] = *(bytesToEncode++);
+    if (i == 3)
+    {
+      char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
+      char_array_4[1] = static_cast<unsigned char>(((char_array_3[0] & 0x03) << 4) +
+        ((char_array_3[1] & 0xf0) >> 4));
+      char_array_4[2] = static_cast<unsigned char>(((char_array_3[1] & 0x0f) << 2) +
+        ((char_array_3[2] & 0xc0) >> 6));
+      char_array_4[3] = char_array_3[2] & 0x3f;
+
+      for (i = 0; (i < 4); i++)
+      {
+        _outBase64.push_back(kBase64Chars[char_array_4[i]]);
+      }
+      i = 0;
+    }
+  }
+
+  if (i)
+  {
+    int j = 0;
+    for (j = i; j < 3; j++)
+      char_array_3[j] = '\0';
+
+    char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
+    char_array_4[1] = static_cast<unsigned char>(((char_array_3[0] & 0x03) << 4) +
+      ((char_array_3[1] & 0xf0) >> 4));
+    char_array_4[2] = static_cast<unsigned char>(((char_array_3[1] & 0x0f) << 2) +
+      ((char_array_3[2] & 0xc0) >> 6));
+
+    for (j = 0; (j < i + 1); j++)
+    {
+      _outBase64.push_back(kBase64Chars[char_array_4[j]]);
+    }
+
+    while ((i++ < 3))
+    {
+      _outBase64.push_back('=');
+    }
+  }
+}
+
+std::vector<uint8_t> Base64Decode(const std::string &_s)
+{
+  int in_len = static_cast<int>(_s.size());
+  int i = 0;
+  size_t in_ = 0u;
+  uint8_t char_array_4[4], char_array_3[3];
+  std::vector<uint8_t> ret;
+
+  while (in_len-- && (_s[in_] != '=') && is_base64(_s[in_]))
+  {
+    char_array_4[i++] = static_cast<uint8_t>(_s[in_]);
+    in_++;
+    if (i == 4)
+    {
+      for (i = 0; i < 4; i++)
+      {
+        char_array_4[i] = static_cast<uint8_t>(
+          kBase64Chars.find(static_cast<char>(char_array_4[i])));
+      }
+
+      char_array_3[0] = static_cast<uint8_t>((char_array_4[0] << 2) +
+        ((char_array_4[1] & 0x30) >> 4));
+      char_array_3[1] = static_cast<uint8_t>(((char_array_4[1] & 0xf) << 4) +
+        ((char_array_4[2] & 0x3c) >> 2));
+      char_array_3[2] =
+        static_cast<uint8_t>(((char_array_4[2] & 0x3) << 6) + char_array_4[3]);
+
+      for (i = 0; (i < 3); i++)
+      {
+        ret.push_back(char_array_3[i]);
+      }
+      i = 0;
+    }
+  }
+
+  if (i)
+  {
+    int j = 0;
+    for (j = 0; j < i; j++)
+    {
+      char_array_4[j] = static_cast<uint8_t>(static_cast<uint8_t>(
+        kBase64Chars.find(static_cast<char>(char_array_4[j]))));
+    }
+
+    char_array_3[0] = static_cast<uint8_t>((char_array_4[0] << 2) +
+      ((char_array_4[1] & 0x30) >> 4));
+    char_array_3[1] = static_cast<uint8_t>(((char_array_4[1] & 0xf) << 4) +
+      ((char_array_4[2] & 0x3c) >> 2));
+
+    for (j = 0; (j < i - 1); j++)
+    {
+      ret.push_back(char_array_3[j]);
+    }
+  }
+
+  return ret;
+}
diff --git a/test/integration/depth_camera.cc b/test/integration/depth_camera.cc
index 7c3248a84..5e28a00cd 100644
--- a/test/integration/depth_camera.cc
+++ b/test/integration/depth_camera.cc
@@ -237,10 +237,7 @@ TEST_F(DepthCameraTest, GZ_UTILS_TEST_DISABLED_ON_WIN32(DepthCameraBoxes))
       unsigned int ma = *mrgba >> 0 & 0xFF;
       EXPECT_EQ(0u, mr);
       EXPECT_EQ(0u, mg);
-#ifndef __APPLE__
-      // https://github.com/gazebosim/gz-rendering/issues/332
       EXPECT_GT(mb, 0u);
-#endif
 
       // Far left and right points should be red (background color)
       float lc = pointCloudData[pcLeft + 3];
@@ -442,11 +439,8 @@ TEST_F(DepthCameraTest, GZ_UTILS_TEST_DISABLED_ON_WIN32(DepthCameraBoxes))
         unsigned int a = *rgba >> 0 & 0xFF;
         EXPECT_EQ(0u, r);
         EXPECT_EQ(0u, g);
-#ifndef __APPLE__
-        // https://github.com/gazebosim/gz-rendering/issues/332
         EXPECT_GT(b, 0u);
         EXPECT_EQ(255u, a);
-#endif
       }
     }
   }
diff --git a/test/integration/heightmap.cc b/test/integration/heightmap.cc
new file mode 100644
index 000000000..bf425733b
--- /dev/null
+++ b/test/integration/heightmap.cc
@@ -0,0 +1,594 @@
+/*
+ * Copyright (C) 2022 Open Source Robotics Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <gtest/gtest.h>
+
+#include "CommonRenderingTest.hh"
+#include "base64.inl"
+
+#include <gz/common/Image.hh>
+#include <gz/common/geospatial/ImageHeightmap.hh>
+
+#include "gz/rendering/Camera.hh"
+#include "gz/rendering/DepthCamera.hh"
+#include "gz/rendering/GpuRays.hh"
+#include "gz/rendering/Heightmap.hh"
+#include "gz/rendering/Image.hh"
+#include "gz/rendering/PixelFormat.hh"
+#include "gz/rendering/Scene.hh"
+
+#include <gz/utils/ExtraTestMacros.hh>
+
+#define DOUBLE_TOL 1e-6
+static unsigned int g_pointCloudCounter = 0;
+
+/////////////////////////////////////////////////
+static void OnNewRgbPointCloud(float *_scanDest, const float *_scan,
+                               unsigned int _width, unsigned int _height,
+                               unsigned int _channels,
+                               const std::string & /*_format*/)
+{
+  float f;
+  int size = _width * _height * _channels;
+  memcpy(_scanDest, _scan, size * sizeof(f));
+  g_pointCloudCounter++;
+}
+
+static void OnNewGpuRaysFrame(float *_scanDest, const float *_scan,
+                              unsigned int _width, unsigned int _height,
+                              unsigned int _channels,
+                              const std::string & /*_format*/)
+{
+  float f;
+  int size = _width * _height * _channels;
+  memcpy(_scanDest, _scan, size * sizeof(f));
+}
+
+using namespace gz;
+using namespace rendering;
+
+// #define DUMP_MODE
+
+#ifdef DUMP_MODE
+static void DumpReferenceLogToFile(const char *_base64Data)
+{
+  std::vector<uint8_t> data = Base64Decode(_base64Data);
+
+  uint32_t *headerPtr = reinterpret_cast<uint32_t *>(data.data());
+
+  const uint32_t width = headerPtr[0];
+  const uint32_t height = headerPtr[1];
+  // const PixelFormat format = static_cast<PixelFormat>(headerPtr[2]);
+  common::Image comImage;
+  comImage.SetFromData(data.data() + 3 * sizeof(uint32_t), width, height,
+                       common::Image::RGB_INT8);
+  comImage.SavePNG("/tmp/Original.png");
+}
+
+static void DumpDepthLogToFile(const char *_base64Data)
+{
+  std::vector<uint8_t> data = Base64Decode(_base64Data);
+
+  uint32_t *headerPtr = reinterpret_cast<uint32_t *>(data.data());
+
+  const uint32_t width = headerPtr[0];
+  const uint32_t height = headerPtr[1];
+  // const PixelFormat format = static_cast<PixelFormat>(headerPtr[2]);
+
+  const float *depthData =
+    reinterpret_cast<const float *>(data.data() + 3 * sizeof(uint32_t));
+  std::vector<uint8_t> colourData;
+  colourData.reserve(width * height * 3u);
+
+  for (uint32_t y = 0u; y < height; ++y)
+  {
+    for (uint32_t x = 0u; x < width; ++x)
+    {
+      const size_t depthIdx = (y * width + x) * 4u;
+
+      const uint32_t *depthrgba =
+        reinterpret_cast<const uint32_t *>(&depthData[depthIdx + 3u]);
+
+      const uint8_t depthr = *depthrgba >> 24 & 0xFF;
+      const uint8_t depthg = *depthrgba >> 16 & 0xFF;
+      const uint8_t depthb = *depthrgba >> 8 & 0xFF;
+
+      colourData.push_back(depthr);
+      colourData.push_back(depthg);
+      colourData.push_back(depthb);
+    }
+  }
+
+  common::Image comImage;
+  comImage.SetFromData(colourData.data(), width, height,
+                       common::Image::RGB_INT8);
+  comImage.SavePNG("/tmp/DepthRgbData.png");
+}
+#endif
+
+/////////////////////////////////////////////////
+class HeightmapTest : public CommonRenderingTest
+{
+  /// \brief Path to test media files.
+public:
+  const std::string TEST_MEDIA_PATH{ common::joinPaths(
+    std::string(PROJECT_SOURCE_PATH), "test", "media") };
+};
+
+static bool IsUbuntuFocal()
+{
+  std::ifstream inFile("/etc/os-release", std::ios::binary | std::ios::in);
+
+  if (inFile.is_open())
+  {
+    inFile.seekg(0, std::ios::end);
+    const std::streamsize sizeBytes = inFile.tellg();
+    inFile.seekg(0, std::ios::beg);
+
+    if (sizeBytes > 0 && sizeBytes < 2048)
+    {
+      std::string dataString;
+      dataString.resize((size_t)sizeBytes);
+      inFile.read(dataString.data(), sizeBytes);
+
+      if (dataString.find("UBUNTU_CODENAME=focal") != std::string::npos)
+      {
+        return true;
+      }
+    }
+  }
+
+  return false;
+}
+
+/////////////////////////////////////////////////
+TEST_F(HeightmapTest, GZ_UTILS_TEST_DISABLED_ON_WIN32(Heightmap))
+{
+  // This test is too strict for ogre
+  CHECK_UNSUPPORTED_ENGINE("ogre");
+
+#ifdef DUMP_MODE
+  // clang-format off
+  const char *colourDataBase64 = "";
+  // clang-format on
+  DumpReferenceLogToFile(colourDataBase64);
+
+  // clang-format off
+  const char *depthDataBase64 = "";
+  // clang-format on
+  DumpDepthLogToFile(depthDataBase64);
+  return;
+#endif
+
+  // \todo(anyone) test fails on github action but pass on other
+  // ubuntu jenkins CI. Need to investigate further.
+  // Github action sets the MESA_GL_VERSION_OVERRIDE variable
+  // so check for this variable and disable test if it is set.
+  //
+  // It appears to be either a corruption bug or unsupported feature
+  // by old Mesa version in SW, bundled with Ubuntu Focal.
+  // See
+  // https://github.com/gazebosim/gz-rendering/pull/785#issuecomment-1360643894
+#ifdef __linux__
+  std::string value;
+  const bool result = common::env("MESA_GL_VERSION_OVERRIDE", value, true);
+  if (result && value == "3.3" && IsUbuntuFocal())
+  {
+    GTEST_SKIP() << "Test is run on machine with software rendering or mesa "
+                 << "driver. Skipping test. " << std::endl;
+  }
+#endif
+
+  // add resources in build dir
+  engine->AddResourcePath(
+    common::joinPaths(std::string(PROJECT_BUILD_PATH), "src"));
+
+  ScenePtr scene = engine->CreateScene("scene");
+  ASSERT_NE(nullptr, scene);
+  scene->SetAmbientLight(0.3, 0.3, 0.3);
+  scene->SetBackgroundColor(1.0, 0.0, 0.0);
+
+  VisualPtr root = scene->RootVisual();
+  ASSERT_NE(nullptr, root);
+
+  // create camera
+  CameraPtr camera = scene->CreateCamera();
+  ASSERT_NE(nullptr, camera);
+  camera->SetImageWidth(100);
+  camera->SetImageHeight(100);
+  camera->SetHFOV(camera->HFOV());
+  root->AddChild(camera);
+
+  // create directional light
+  DirectionalLightPtr light = scene->CreateDirectionalLight();
+  light->SetDirection(-0.5, -0.5, -1);
+  light->SetDiffuseColor(0.9, 0.9, 0.9);
+  light->SetSpecularColor(0.9, 0.9, 0.9);
+  root->AddChild(light);
+
+  // create ImageHeightmap
+  auto data = std::make_shared<common::ImageHeightmap>();
+  data->Load(common::joinPaths(TEST_MEDIA_PATH, "heightmap_bowl.png"));
+
+  HeightmapDescriptor desc;
+  desc.SetName("example_bowl");
+  desc.SetData(data);
+  desc.SetSize({ 17, 17, 7.0 });
+  desc.SetSampling(2u);
+  desc.SetUseTerrainPaging(false);
+
+  const auto textureImage =
+    common::joinPaths(TEST_MEDIA_PATH, "materials", "textures", "texture.png");
+  const auto normalImage = common::joinPaths(TEST_MEDIA_PATH, "materials",
+    "textures", "flat_normal.png");
+
+  HeightmapTexture textureA;
+  textureA.SetSize(1.0);
+  textureA.SetDiffuse(textureImage);
+  textureA.SetNormal(normalImage);
+  desc.AddTexture(textureA);
+
+  HeightmapBlend blendA;
+  blendA.SetMinHeight(2.0);
+  blendA.SetFadeDistance(5.0);
+  desc.AddBlend(blendA);
+
+  HeightmapTexture textureB;
+  textureB.SetSize(1.0);
+  textureB.SetDiffuse(textureImage);
+  textureB.SetNormal(normalImage);
+  desc.AddTexture(textureB);
+
+  HeightmapBlend blendB;
+  blendB.SetMinHeight(4.0);
+  blendB.SetFadeDistance(5.0);
+  desc.AddBlend(blendB);
+
+  HeightmapTexture textureC;
+  textureC.SetSize(1.0);
+  textureC.SetDiffuse(textureImage);
+  textureC.SetNormal(normalImage);
+  desc.AddTexture(textureC);
+
+  auto heightmapGeom = scene->CreateHeightmap(desc);
+
+  auto vis = scene->CreateVisual();
+  vis->AddGeometry(heightmapGeom);
+  root->AddChild(vis);
+
+  // create green material
+  MaterialPtr green = scene->CreateMaterial();
+  green->SetDiffuse(0.0, 0.7, 0.0);
+  green->SetSpecular(0.5, 0.5, 0.5);
+
+  // create box
+  VisualPtr box = scene->CreateVisual();
+  box->AddGeometry(scene->CreateBox());
+  box->SetLocalPosition(0.5, 0.5, 5.5);
+  box->Scale(1.0);
+  box->SetMaterial(green);
+  root->AddChild(box);
+
+  camera->SetLocalPosition(-0.802621, 5.84365, 9.67877);
+  camera->SetLocalRotation(0.0, 0.588, -1.125);
+
+  DepthCameraPtr depthCamera = scene->CreateDepthCamera();
+  depthCamera->SetImageWidth(camera->ImageWidth());
+  depthCamera->SetImageHeight(camera->ImageHeight());
+  depthCamera->SetHFOV(camera->HFOV());
+  depthCamera->SetNearClipPlane(camera->NearClipPlane());
+  depthCamera->SetFarClipPlane(camera->FarClipPlane());
+  depthCamera->CreateDepthTexture();
+  depthCamera->SetLocalPosition(camera->LocalPosition());
+  depthCamera->SetLocalRotation(camera->LocalRotation());
+  root->AddChild(depthCamera);
+
+  unsigned int pointCloudChannelCount = 4u;
+  float *pointCloudData =
+    new float[depthCamera->ImageHeight() * depthCamera->ImageWidth() *
+              pointCloudChannelCount];
+  common::ConnectionPtr connection = depthCamera->ConnectNewRgbPointCloud(
+    std::bind(&::OnNewRgbPointCloud, pointCloudData, std::placeholders::_1,
+              std::placeholders::_2, std::placeholders::_3,
+              std::placeholders::_4, std::placeholders::_5));
+
+  // capture original image with box (no noise)
+  Image normalCamImage = camera->CreateImage();
+  camera->Capture(normalCamImage);
+
+  g_pointCloudCounter = 0u;
+  depthCamera->Update();
+  EXPECT_EQ(1u, g_pointCloudCounter);
+
+  {
+    int numErrors = 0;
+    int numLargeErrors = 0;
+    int accumError = 0;
+
+    const unsigned char *normalData =
+      static_cast<const unsigned char *>(normalCamImage.Data());
+    const float *depthData = pointCloudData;
+    const unsigned int height = camera->ImageHeight();
+    const unsigned int width = camera->ImageWidth();
+    const unsigned int channelCount = 4;
+    const unsigned int step = width * channelCount;
+    const unsigned int normalChannelCount = 3;
+    const unsigned int normalStep = width * normalChannelCount;
+    for (unsigned int i = 0; i < height; ++i)
+    {
+      for (unsigned int j = 0; j < width; ++j)
+      {
+        const unsigned int idx = i * step + j * channelCount;
+        const unsigned int normalIdx = i * normalStep + j * normalChannelCount;
+
+        const uint32_t *depthrgba =
+          reinterpret_cast<const uint32_t *>(&depthData[idx + 3]);
+
+        const uint8_t depthr = *depthrgba >> 24 & 0xFF;
+        const uint8_t depthg = *depthrgba >> 16 & 0xFF;
+        const uint8_t depthb = *depthrgba >> 8 & 0xFF;
+        // const uint8_t deptha = *depthrgba >> 0 & 0xFF;
+
+        const uint8_t largeError = 5u;
+
+        if (abs(depthr - normalData[normalIdx + 0]) > largeError ||
+            abs(depthg - normalData[normalIdx + 1]) > largeError ||
+            abs(depthb - normalData[normalIdx + 2]) > largeError)
+        {
+          const uint8_t error = 9u;
+          EXPECT_NEAR(depthr, normalData[normalIdx + 0], error);
+          EXPECT_NEAR(depthg, normalData[normalIdx + 1], error);
+          EXPECT_NEAR(depthb, normalData[normalIdx + 2], error);
+          ++numLargeErrors;
+        }
+        else
+        {
+          const uint8_t error = 4u;
+          EXPECT_NEAR(depthr, normalData[normalIdx + 0], error);
+          EXPECT_NEAR(depthg, normalData[normalIdx + 1], error);
+          EXPECT_NEAR(depthb, normalData[normalIdx + 2], error);
+          // EXPECT_EQ(deptha, normalData[normalIdx + 3]);
+        }
+
+        if (depthr != normalData[normalIdx + 0] ||
+            depthg != normalData[normalIdx + 1] ||
+            depthb != normalData[normalIdx + 2])
+        {
+          accumError += abs(depthr - normalData[normalIdx + 0]);
+          accumError += abs(depthg - normalData[normalIdx + 1]);
+          accumError += abs(depthb - normalData[normalIdx + 2]);
+          ++numErrors;
+        }
+
+        /// Background is red
+        const bool isBackgroundNormal = normalData[normalIdx + 0] == 255u &&
+                                        normalData[normalIdx + 1] == 0u &&
+                                        normalData[normalIdx + 2] == 0u;
+        const bool isBackgroundDepth =
+          depthr == 255u && depthg == 0u && depthb == 0u;
+
+        EXPECT_EQ(isBackgroundNormal, isBackgroundDepth);
+
+        if (isBackgroundDepth)
+        {
+          EXPECT_FALSE(std::isnan(depthData[idx + 0]));
+          EXPECT_FALSE(std::isnan(depthData[idx + 1]));
+          EXPECT_FALSE(std::isnan(depthData[idx + 2]));
+
+          EXPECT_TRUE(std::isinf(depthData[idx + 0]));
+          EXPECT_TRUE(std::isinf(depthData[idx + 1]));
+          EXPECT_TRUE(std::isinf(depthData[idx + 2]));
+
+          // The sky should only be visible in the top part of the picture
+          EXPECT_TRUE(i < height / 4);
+        }
+        else
+        {
+          EXPECT_FALSE(std::isnan(depthData[idx + 0]));
+          EXPECT_FALSE(std::isnan(depthData[idx + 1]));
+          EXPECT_FALSE(std::isnan(depthData[idx + 2]));
+
+          EXPECT_FALSE(std::isinf(depthData[idx + 0]));
+          EXPECT_FALSE(std::isinf(depthData[idx + 1]));
+          EXPECT_FALSE(std::isinf(depthData[idx + 2]));
+        }
+      }
+    }
+
+    // Expect less than 15 pixels in 10k to be different due to GPU &
+    // floating point differences when optimizing shaders
+    EXPECT_LE(numErrors, width * height * 15 / 10000);
+    // Expect less than an accumulated deviation of 25 per channel (RGB)
+    EXPECT_LE(accumError, 25 * 3);
+    // Expect very few "large" errors.
+    EXPECT_LE(numLargeErrors, width * height * 5 / 10000);
+
+    if (this->HasFailure())
+    {
+      std::string base64Encoded;
+
+      {
+        // Output reference
+        base64Encoded.clear();
+        const uint32_t header[3] = {
+          width, height, static_cast<uint32_t>(normalCamImage.Format())
+        };
+
+        Base64Encode(header, sizeof(header), base64Encoded);
+        Base64Encode(normalData, width * height * normalChannelCount,
+                     base64Encoded);
+        std::cout << "Reference Camera Output:" << std::endl;
+        std::cout << base64Encoded << std::endl;
+      }
+
+      {
+        // Output value
+        base64Encoded.clear();
+        const uint32_t header[3] = { width, height,
+                                     static_cast<uint32_t>(PF_FLOAT32_RGBA) };
+
+        std::cout << "Depth Camera Output:" << std::endl;
+        Base64Encode(header, sizeof(header), base64Encoded);
+        Base64Encode(pointCloudData, width * height * sizeof(float) * 4u,
+                     base64Encoded);
+
+        std::cout << base64Encoded << std::endl;
+      }
+    }
+  }
+
+  // cleanup
+  connection.reset();
+
+  // Clean up
+  engine->DestroyScene(scene);
+
+  delete[] pointCloudData;
+  pointCloudData = nullptr;
+}
+
+/////////////////////////////////////////////////
+TEST_F(HeightmapTest, GZ_UTILS_TEST_DISABLED_ON_WIN32(HeightmapGpuRays))
+{
+  // ogre fails with lidar.
+  // See https://github.com/gazebosim/gz-rendering/issues/35
+  CHECK_UNSUPPORTED_ENGINE("ogre");
+
+  // Test GPU rays heightmap detection
+  const double hMinAngle = -GZ_PI / 8.0;
+  const double hMaxAngle = GZ_PI / 8.0;
+  const double minRange = 1.0;
+  const double maxRange = 100.0;
+  const int hRayCount = 20;
+  const int vRayCount = 1;
+
+  ScenePtr scene = engine->CreateScene("scene");
+  ASSERT_TRUE(scene != nullptr);
+
+  VisualPtr root = scene->RootVisual();
+
+  // Create ray caster oriented to look down at the heightmap
+  math::Pose3d testPose(math::Vector3d(0, 0, 20),
+                        math::Quaterniond(math::Vector3d(0, GZ_PI / 2, 0)));
+
+  GpuRaysPtr gpuRays = scene->CreateGpuRays("gpu_rays_1");
+  gpuRays->SetWorldPosition(testPose.Pos());
+  gpuRays->SetWorldRotation(testPose.Rot());
+  gpuRays->SetNearClipPlane(minRange);
+  gpuRays->SetFarClipPlane(maxRange);
+  gpuRays->SetAngleMin(hMinAngle);
+  gpuRays->SetAngleMax(hMaxAngle);
+  gpuRays->SetRayCount(hRayCount);
+  // set visibility mask
+  // note this is not the same as GZ_VISIBILITY_MASK
+  // which is 0x0FFFFFFF
+  gpuRays->SetVisibilityMask(0xFFFFFFFF);
+
+  gpuRays->SetVerticalRayCount(vRayCount);
+  root->AddChild(gpuRays);
+
+  // create heightmap
+
+  // Heightmap data
+  auto heightImage = common::joinPaths(TEST_MEDIA_PATH, "heightmap_bowl.png");
+  math::Vector3d size{ 100, 100, 10 };
+  math::Vector3d position{ 0, 0, 0 };
+  auto textureImage =
+    common::joinPaths(TEST_MEDIA_PATH, "materials", "textures", "texture.png");
+  auto normalImage = common::joinPaths(TEST_MEDIA_PATH, "materials", "textures",
+    "flat_normal.png");
+
+  auto data = std::make_shared<common::ImageHeightmap>();
+  data->Load(heightImage);
+
+  EXPECT_EQ(heightImage, data->Filename());
+
+  HeightmapDescriptor desc;
+  desc.SetData(data);
+  desc.SetSize(size);
+  desc.SetPosition(position);
+  desc.SetUseTerrainPaging(true);
+  desc.SetSampling(4u);
+
+  HeightmapTexture textureA;
+  textureA.SetSize(0.5);
+  textureA.SetDiffuse(textureImage);
+  textureA.SetNormal(normalImage);
+  desc.AddTexture(textureA);
+
+  HeightmapBlend blendA;
+  blendA.SetMinHeight(2.0);
+  blendA.SetFadeDistance(5.0);
+  desc.AddBlend(blendA);
+
+  HeightmapTexture textureB;
+  textureB.SetSize(0.5);
+  textureB.SetDiffuse(textureImage);
+  textureB.SetNormal(normalImage);
+  desc.AddTexture(textureB);
+
+  HeightmapBlend blendB;
+  blendB.SetMinHeight(4.0);
+  blendB.SetFadeDistance(5.0);
+  desc.AddBlend(blendB);
+
+  HeightmapTexture textureC;
+  textureC.SetSize(0.5);
+  textureC.SetDiffuse(textureImage);
+  textureC.SetNormal(normalImage);
+  desc.AddTexture(textureC);
+
+  auto heightmap = scene->CreateHeightmap(desc);
+  ASSERT_NE(nullptr, heightmap);
+
+  // Add to a visual
+  auto vis = scene->CreateVisual();
+  vis->AddGeometry(heightmap);
+  EXPECT_EQ(1u, vis->GeometryCount());
+  EXPECT_TRUE(vis->HasGeometry(heightmap));
+  EXPECT_EQ(heightmap, vis->GeometryByIndex(0));
+  scene->RootVisual()->AddChild(vis);
+
+  // Verify rays caster range readings
+  // listen to new gpu rays frames
+  unsigned int channels = gpuRays->Channels();
+  float *scan = new float[hRayCount * vRayCount * channels];
+  common::ConnectionPtr connection = gpuRays->ConnectNewGpuRaysFrame(std::bind(
+    &::OnNewGpuRaysFrame, scan, std::placeholders::_1, std::placeholders::_2,
+    std::placeholders::_3, std::placeholders::_4, std::placeholders::_5));
+
+  scene->SetTime(scene->Time() + std::chrono::milliseconds(16));
+  gpuRays->Update();
+
+  for (unsigned int i = 0; i < hRayCount * channels; i += channels)
+  {
+    // range readings should not be inf and far lower than the max range
+    // it should be between ~15m and 20m
+    double range = scan[i];
+    EXPECT_LT(14.9, range);
+    EXPECT_GT(20.0, range);
+  }
+
+  // cleanup
+  connection.reset();
+
+  delete[] scan;
+  scan = nullptr;
+
+  // Clean up
+  engine->DestroyScene(scene);
+}
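Editor's note (illustrative sketch, not part of the patch): both the updated depth_camera test and the new heightmap test read the point cloud in the same PF_FLOAT32_RGBA layout — four floats per pixel, XYZ plus a fourth float whose bits hold the packed RGBA color written by the shaders above. A hypothetical helper showing that unpacking step:

#include <cstdint>
#include <cstring>

// Per-pixel color unpacked from the 4th float of a PF_FLOAT32_RGBA point
// cloud sample (matches the bit layout produced by packUnorm4x8Gz()).
struct PackedColor
{
  uint8_t r, g, b, a;
};

// _sample points at the 4 floats of one pixel: x, y, z, packed color.
static PackedColor UnpackPointColor(const float *_sample)
{
  uint32_t rgba = 0u;
  // Same idea as the tests' reinterpret_cast of &pointCloudData[idx + 3].
  std::memcpy(&rgba, &_sample[3], sizeof rgba);

  PackedColor c;
  c.r = static_cast<uint8_t>(rgba >> 24 & 0xFF);
  c.g = static_cast<uint8_t>(rgba >> 16 & 0xFF);
  c.b = static_cast<uint8_t>(rgba >> 8 & 0xFF);
  c.a = static_cast<uint8_t>(rgba >> 0 & 0xFF);
  return c;
}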