diff --git a/include/neural-graphics-primitives/camera_path.h b/include/neural-graphics-primitives/camera_path.h index 16a33e7db85937c39d2858f6e8d1f811ba393ae7..492cd8385a6998bd5a7c45cf9078abd7289857d1 100644 --- a/include/neural-graphics-primitives/camera_path.h +++ b/include/neural-graphics-primitives/camera_path.h @@ -21,6 +21,7 @@ #include <imgui/imgui.h> #include <imguizmo/ImGuizmo.h> +#include <chrono> #include <vector> struct ImDrawList; @@ -68,29 +69,59 @@ CameraKeyframe lerp(const CameraKeyframe& p0, const CameraKeyframe& p1, float t, CameraKeyframe spline(float t, const CameraKeyframe& p0, const CameraKeyframe& p1, const CameraKeyframe& p2, const CameraKeyframe& p3); struct CameraPath { - std::vector<CameraKeyframe> m_keyframes; - bool m_update_cam_from_path = false; - float m_playtime = 0.f; - float m_autoplayspeed = 0.f; - // If m_loop is set true, the last frame set will be more like "next to last," + std::vector<CameraKeyframe> keyframes; + bool update_cam_from_path = false; + float play_time = 0.f; + float auto_play_speed = 0.f; + // If loop is set to true, the last keyframe behaves more like "next to last," // with animation then returning back to the first frame, making a continuous loop. // Note that the user does not have to (and should not normally) duplicate the first frame to be the last frame. - bool m_loop = false; + bool loop = false; + + struct RenderSettings { + Eigen::Vector2i resolution = {1920, 1080}; + int spp = 8; + float fps = 60.0f; + float duration_seconds = 5.0f; + float shutter_fraction = 0.5f; + int quality = 10; + + uint32_t n_frames() const { + return (uint32_t)((double)duration_seconds * fps); + } + + float frame_seconds() const { + return 1.0f / (duration_seconds * fps); + } + + float frame_milliseconds() const { + return 1000.0f / (duration_seconds * fps); + } + + std::string filename = "video.mp4"; + }; + + RenderSettings render_settings; + bool rendering = false; + uint32_t render_frame_idx = 0; + std::chrono::time_point<std::chrono::steady_clock> render_start_time; + + Eigen::Matrix<float, 3, 4> render_frame_end_camera; const CameraKeyframe& get_keyframe(int i) { - if (m_loop) { - int size = (int)m_keyframes.size(); + if (loop) { + int size = (int)keyframes.size(); // add size to ensure no negative value is generated by modulo - return m_keyframes[(i + size) % size]; + return keyframes[(i + size) % size]; } else { - return m_keyframes[tcnn::clamp(i, 0, (int)m_keyframes.size()-1)]; + return keyframes[tcnn::clamp(i, 0, (int)keyframes.size()-1)]; } } CameraKeyframe eval_camera_path(float t) { - if (m_keyframes.empty()) + if (keyframes.empty()) return {}; // make room for last frame == first frame when looping - t *= (float)(m_loop ? m_keyframes.size() : m_keyframes.size()-1); + t *= (float)(loop ?
keyframes.size() : keyframes.size()-1); int t1 = (int)floorf(t); return spline(t-floorf(t), get_keyframe(t1-1), get_keyframe(t1), get_keyframe(t1+1), get_keyframe(t1+2)); } diff --git a/include/neural-graphics-primitives/nerf_loader.h b/include/neural-graphics-primitives/nerf_loader.h index d86388163fcd9deea3c23a74e4969975a0fe2565..426d99bce841b7faa64995a41dbb611245867522 100644 --- a/include/neural-graphics-primitives/nerf_loader.h +++ b/include/neural-graphics-primitives/nerf_loader.h @@ -164,8 +164,9 @@ struct NerfDataset { void nerf_ray_to_ngp(Ray& ray, bool scale_direction = false) { ray.o = ray.o * scale + offset; - if (scale_direction) + if (scale_direction) { ray.d *= scale; + } float tmp = ray.o[0]; ray.o[0] = ray.o[1]; diff --git a/include/neural-graphics-primitives/testbed.h b/include/neural-graphics-primitives/testbed.h index d0ba0a6ee09f65840693fe6f24a5802530ffc00e..aaac120531b1c72ec303d4fd75d3308c9c076b40 100644 --- a/include/neural-graphics-primitives/testbed.h +++ b/include/neural-graphics-primitives/testbed.h @@ -23,6 +23,7 @@ #include <neural-graphics-primitives/render_buffer.h> #include <neural-graphics-primitives/sdf.h> #include <neural-graphics-primitives/shared_queue.h> +#include <neural-graphics-primitives/thread_pool.h> #include <neural-graphics-primitives/trainable_buffer.cuh> #include <tiny-cuda-nn/multi_stream.h> @@ -66,6 +67,8 @@ public: Testbed(ETestbedMode mode, const std::string& data_path) : Testbed(mode) { load_training_data(data_path); } Testbed(ETestbedMode mode, const std::string& data_path, const std::string& network_config_path) : Testbed(mode, data_path) { reload_network_from_file(network_config_path); } Testbed(ETestbedMode mode, const std::string& data_path, const nlohmann::json& network_config) : Testbed(mode, data_path) { reload_network_from_json(network_config); } + + bool clear_tmp_dir(); void load_training_data(const std::string& data_path); void clear_training_data(); @@ -354,6 +357,7 @@ public: template <typename T> void dump_parameters_as_images(const T* params, const std::string& filename_base); + void prepare_next_camera_path_frame(); void imgui(); void training_prep_nerf(uint32_t batch_size, cudaStream_t stream); void training_prep_sdf(uint32_t batch_size, cudaStream_t stream); @@ -523,6 +527,8 @@ public: std::vector<std::shared_ptr<GLTexture>> m_render_textures; #endif + ThreadPool m_thread_pool; + std::vector<std::future<void>> m_render_futures; std::vector<CudaRenderBuffer> m_render_surfaces; std::unique_ptr<CudaRenderBuffer> m_pip_render_surface; diff --git a/src/camera_path.cu b/src/camera_path.cu index 281544880f8fb8fc40913285c73564afeb8288e8..2a550f65bb99ac799c576c2b104b49a00787fe48 100644 --- a/src/camera_path.cu +++ b/src/camera_path.cu @@ -61,8 +61,8 @@ CameraKeyframe spline(float t, const CameraKeyframe& p0, const CameraKeyframe& p return lerp(r0, r1, t, 0.f, 1.f); } else { // cubic bspline - float tt=t*t; - float ttt=t*t*t; + float tt = t*t; + float ttt = t*t*t; float a = (1-t)*(1-t)*(1-t)*(1.f/6.f); float b = (3.f*ttt-6.f*tt+4.f)*(1.f/6.f); float c = (-3.f*ttt+3.f*tt+3.f*t+1.f)*(1.f/6.f); @@ -109,14 +109,15 @@ void from_json(bool is_first, const json& j, CameraKeyframe& p, const CameraKeyf void CameraPath::save(const std::string& filepath_string) { json j = { - {"time", m_playtime}, - {"path", m_keyframes} + {"loop", loop}, + {"time", play_time}, + {"path", keyframes}, }; std::ofstream f(filepath_string); f << j; } -void CameraPath::load(const std::string& filepath_string, const Eigen::Matrix<float, 3, 4> &first_xform) { 
+void CameraPath::load(const std::string& filepath_string, const Eigen::Matrix<float, 3, 4>& first_xform) { std::ifstream f(filepath_string); if (!f) { throw std::runtime_error{fmt::format("Camera path {} does not exist.", filepath_string)}; @@ -127,80 +128,31 @@ void CameraPath::load(const std::string& filepath_string, const Eigen::Matrix<fl CameraKeyframe first; - m_keyframes.clear(); - if (j.contains("time")) m_playtime=j["time"]; - if (j.contains("path")) for (auto &el : j["path"]) { + keyframes.clear(); + if (j.contains("loop")) loop = j["loop"]; + if (j.contains("time")) play_time = j["time"]; + if (j.contains("path")) for (auto& el : j["path"]) { CameraKeyframe p; - bool is_first = m_keyframes.empty(); + bool is_first = keyframes.empty(); from_json(is_first, el, p, first, first_xform); if (is_first) { first = p; } - m_keyframes.push_back(p); + keyframes.push_back(p); } } #ifdef NGP_GUI int CameraPath::imgui(char path_filename_buf[128], float frame_milliseconds, Matrix<float, 3, 4>& camera, float slice_plane_z, float scale, float fov, float aperture_size, float bounding_radius, const Eigen::Matrix<float, 3, 4>& first_xform, int glow_mode, float glow_y_cutoff) { - int n=std::max(0,int(m_keyframes.size())-1); - int read= 0; // 1=smooth, 2=hard - if (!m_keyframes.empty()) { - if (ImGui::SliderFloat("camera path time", &m_playtime, 0.f, 1.f)) read=1; - ImGui::SliderFloat("auto play speed",&m_autoplayspeed, 0.f, 1.f); - if (m_autoplayspeed>0.f && m_playtime<1.f) { - m_playtime+=m_autoplayspeed*(frame_milliseconds/1000.f); - if (m_playtime>1.f) m_playtime=1.f; - read=1; - } - } - if (ImGui::Button("Add from cam")) { - int i=(int)ceil(m_playtime*(float)n+0.001f); - if (i>m_keyframes.size()) i=m_keyframes.size(); - if (i<0) i=0; - m_keyframes.insert(m_keyframes.begin()+i, CameraKeyframe(camera, slice_plane_z, scale, fov, aperture_size, glow_mode, glow_y_cutoff)); - m_update_cam_from_path = false; - int n=std::max(0,int(m_keyframes.size())-1); - m_playtime = n ? float(i)/float(n) : 1.f; - read = 2; - } - if (!m_keyframes.empty()) { - ImGui::SameLine(); - if (ImGui::Button("split")) { - m_update_cam_from_path=false; - int i=(int)ceil(m_playtime*(float)n+0.001f); - if (i>m_keyframes.size()) i=(int)m_keyframes.size(); - if (i<0) i=0; - m_keyframes.insert(m_keyframes.begin()+i, eval_camera_path(m_playtime)); - m_playtime=float(i)/float(n+1); - read=2; - } - ImGui::SameLine(); - int i=(int)round(m_playtime*(float)n); - if (ImGui::Button("|<")) { m_playtime=0.f; read=2; } ImGui::SameLine(); - if (ImGui::Button("<")) { m_playtime=n?std::max(0.f,floorf((m_playtime-0.0001f)*(float)n)/(float)n):0.f; read=2;} ImGui::SameLine(); - if (ImGui::Button(m_update_cam_from_path ? 
"STOP" : "READ")) { m_update_cam_from_path=!m_update_cam_from_path; read=2; } ImGui::SameLine(); - if (ImGui::Button(">")) { m_playtime=n?std::min(1.f,ceilf((m_playtime+0.0001f)*(float)n)/(float)n):1.f; read=2;} ImGui::SameLine(); - if (ImGui::Button(">|")) { m_playtime=1.f; read=2;} ImGui::SameLine(); - if (ImGui::Button("Dup")) { m_update_cam_from_path=false; m_keyframes.insert(m_keyframes.begin()+i, m_keyframes[i]); m_playtime=i/float(n+1); read=2;} ImGui::SameLine(); - if (ImGui::Button("Del")) { m_update_cam_from_path=false; m_keyframes.erase(m_keyframes.begin()+i); read=2;} ImGui::SameLine(); - if (ImGui::Button("Set")) { m_keyframes[i]=CameraKeyframe(camera, slice_plane_z, scale, fov, aperture_size, glow_mode, glow_y_cutoff); read=2; if (n) m_playtime=i/float(n); } - - if (ImGui::RadioButton("Translate", m_gizmo_op == ImGuizmo::TRANSLATE)) - m_gizmo_op = ImGuizmo::TRANSLATE; - ImGui::SameLine(); - if (ImGui::RadioButton("Rotate", m_gizmo_op == ImGuizmo::ROTATE)) - m_gizmo_op = ImGuizmo::ROTATE; - ImGui::SameLine(); - if (ImGui::RadioButton("Local", m_gizmo_mode == ImGuizmo::LOCAL)) - m_gizmo_mode = ImGuizmo::LOCAL; - ImGui::SameLine(); - if (ImGui::RadioButton("World", m_gizmo_mode == ImGuizmo::WORLD)) - m_gizmo_mode = ImGuizmo::WORLD; - } + int n = std::max(0, int(keyframes.size()) - 1); + int read = 0; // 1=smooth, 2=hard ImGui::InputText("##PathFile", path_filename_buf, 128); ImGui::SameLine(); static std::string camera_path_load_error_string = ""; + + if (rendering) { ImGui::BeginDisabled(); } + if (ImGui::Button("Load")) { try { load(path_filename_buf, first_xform); @@ -209,6 +161,9 @@ int CameraPath::imgui(char path_filename_buf[128], float frame_milliseconds, Mat camera_path_load_error_string = std::string{"Failed to load camera path: "} + e.what(); } } + + if (rendering) { ImGui::EndDisabled(); } + if (ImGui::BeginPopupModal("Camera path load error", NULL, ImGuiWindowFlags_AlwaysAutoResize)) { ImGui::Text("%s", camera_path_load_error_string.c_str()); if (ImGui::Button("OK", ImVec2(120, 0))) { @@ -216,28 +171,99 @@ int CameraPath::imgui(char path_filename_buf[128], float frame_milliseconds, Mat } ImGui::EndPopup(); } - if (!m_keyframes.empty()) { + + if (!keyframes.empty()) { ImGui::SameLine(); - if (ImGui::Button("Save")) + if (ImGui::Button("Save")) { save(path_filename_buf); + } + } + + if (rendering) { ImGui::BeginDisabled(); } + + if (ImGui::Button("Add from cam")) { + int i = (int)ceil(play_time * (float)n + 0.001f); + if (i > keyframes.size()) { i = keyframes.size(); } + if (i < 0) i = 0; + keyframes.insert(keyframes.begin() + i, CameraKeyframe(camera, slice_plane_z, scale, fov, aperture_size, glow_mode, glow_y_cutoff)); + update_cam_from_path = false; + int n = std::max(0, int(keyframes.size()) - 1); + play_time = n ? float(i) / float(n) : 1.0f; + read = 2; } - if (!m_keyframes.empty()) { - int i=(int)round(m_playtime*(float)n); + + if (!keyframes.empty()) { + ImGui::SameLine(); + if (ImGui::Button("Split")) { + update_cam_from_path = false; + int i = (int)ceil(play_time * (float)n + 0.001f); + if (i > keyframes.size()) { i = (int)keyframes.size(); } + if (i < 0) { i = 0; } + keyframes.insert(keyframes.begin() + i, eval_camera_path(play_time)); + play_time = float(i) / float(n + 1); + read = 2; + } + ImGui::SameLine(); + int i=(int)round(play_time * (float)n); + if (ImGui::Button("|<")) { play_time = 0.f; read = 2; } + ImGui::SameLine(); + if (ImGui::Button("<")) { play_time = n ? 
std::max(0.0f, floorf((play_time - 0.0001f) * (float)n) / (float)n) : 0.f; read = 2; } + ImGui::SameLine(); + if (ImGui::Button(update_cam_from_path ? "Stop" : "Read")) { update_cam_from_path = !update_cam_from_path; read = 2; } + ImGui::SameLine(); + if (ImGui::Button(">")) { play_time = n ? std::min(1.0f, ceilf((play_time + 0.0001f) * (float)n) / (float)n) : 1.0f; read = 2; } + ImGui::SameLine(); + if (ImGui::Button(">|")) { play_time = 1.0f; read = 2; } + ImGui::SameLine(); + if (ImGui::Button("Dup")) { update_cam_from_path = false; keyframes.insert(keyframes.begin() + i, keyframes[i]); play_time = i / float(n + 1); read = 2; } + ImGui::SameLine(); + if (ImGui::Button("Del")) { update_cam_from_path = false; keyframes.erase(keyframes.begin() + i); read = 2; } + ImGui::SameLine(); + if (ImGui::Button("Set")) { keyframes[i] = CameraKeyframe(camera, slice_plane_z, scale, fov, aperture_size, glow_mode, glow_y_cutoff); read = 2; if (n) play_time = i / float(n); } + + if (ImGui::RadioButton("Translate", m_gizmo_op == ImGuizmo::TRANSLATE)) { m_gizmo_op = ImGuizmo::TRANSLATE; } + ImGui::SameLine(); + if (ImGui::RadioButton("Rotate", m_gizmo_op == ImGuizmo::ROTATE)) { m_gizmo_op = ImGuizmo::ROTATE; } + ImGui::SameLine(); + if (ImGui::RadioButton("Local", m_gizmo_mode == ImGuizmo::LOCAL)) { m_gizmo_mode = ImGuizmo::LOCAL; } + ImGui::SameLine(); + if (ImGui::RadioButton("World", m_gizmo_mode == ImGuizmo::WORLD)) { m_gizmo_mode = ImGuizmo::WORLD; } + ImGui::SameLine(); + ImGui::Checkbox("Loop path", &loop); + + if (ImGui::SliderFloat("Camera path time", &play_time, 0.0f, 1.0f)) { read = 1; } + + ImGui::SliderFloat("Auto play speed", &auto_play_speed, 0.0f, 1.0f); + if (auto_play_speed > 0.0f && play_time < 1.0f) { + play_time += auto_play_speed * (frame_milliseconds / 1000.f); + if (play_time >= 1.0f) { + play_time = 1.0f; + } + + read = 1; + } + ImGui::Text("Current keyframe %d/%d:", i, n+1); - if (ImGui::SliderFloat("Field of view", &m_keyframes[i].fov, 0.0f, 120.0f)) read=2; - if (ImGui::SliderFloat("Aperture size", &m_keyframes[i].aperture_size, 0.0f, 0.1f)) read=2; - if (ImGui::SliderFloat("Slice Z", &m_keyframes[i].slice, -bounding_radius, bounding_radius)) read=2; - if (ImGui::SliderFloat("Scale", &m_keyframes[i].scale, 0.f,10.f)) read=2; - if (ImGui::SliderInt("Glow Mode", &m_keyframes[i].glow_mode, 0,16)) read=2; - if (ImGui::SliderFloat("Glow Y Cutoff", &m_keyframes[i].glow_y_cutoff, -2.f,3.f)) read=2; + if (ImGui::SliderFloat("Field of view", &keyframes[i].fov, 0.0f, 120.0f)) read = 2; + if (ImGui::SliderFloat("Aperture size", &keyframes[i].aperture_size, 0.0f, 0.1f)) read = 2; + if (ImGui::SliderFloat("Slice Z", &keyframes[i].slice, -bounding_radius, bounding_radius)) read = 2; + if (ImGui::SliderFloat("Scale", &keyframes[i].scale, 0.f,10.f)) read = 2; + if (ImGui::SliderInt("Glow Mode", &keyframes[i].glow_mode, 0,16)) read = 2; + if (ImGui::SliderFloat("Glow Y Cutoff", &keyframes[i].glow_y_cutoff, -2.f,3.f)) read = 2; } - return m_keyframes.empty() ? 0 : read; + + if (rendering) { ImGui::EndDisabled(); } + + return keyframes.empty() ? 
0 : read; } -bool DebugProject(const Matrix<float, 4, 4>&proj, Vector3f p, ImVec2& o) { +bool debug_project(const Matrix<float, 4, 4>& proj, Vector3f p, ImVec2& o) { Vector4f ph; ph << p, 1.f; Vector4f pa = proj * ph; - if (pa.w() <= 0.f) return false; + if (pa.w() <= 0.f) { + return false; + } + o.x = pa.x() / pa.w(); o.y = pa.y() / pa.w(); return true; @@ -245,43 +271,43 @@ bool DebugProject(const Matrix<float, 4, 4>&proj, Vector3f p, ImVec2& o) { void add_debug_line(ImDrawList* list, const Matrix<float, 4, 4>& proj, Vector3f a, Vector3f b, uint32_t col, float thickness) { ImVec2 aa, bb; - if (DebugProject(proj, a, aa) && DebugProject(proj, b, bb)) { + if (debug_project(proj, a, aa) && debug_project(proj, b, bb)) { list->AddLine(aa, bb, col, thickness); } } void visualize_unit_cube(ImDrawList* list, const Matrix<float, 4, 4>& world2proj, const Vector3f& a, const Vector3f& b, const Matrix3f& render_aabb_to_local) { Eigen::Matrix3f m = render_aabb_to_local.transpose(); - add_debug_line(list, world2proj, m * Vector3f{a.x(),a.y(),a.z()}, m * Vector3f{a.x(),a.y(),b.z()}, 0xffff4040); // Z - add_debug_line(list, world2proj, m * Vector3f{b.x(),a.y(),a.z()}, m * Vector3f{b.x(),a.y(),b.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{a.x(),b.y(),a.z()}, m * Vector3f{a.x(),b.y(),b.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{b.x(),b.y(),a.z()}, m * Vector3f{b.x(),b.y(),b.z()}, 0xffffffff); - - add_debug_line(list, world2proj, m * Vector3f{a.x(),a.y(),a.z()}, m * Vector3f{b.x(),a.y(),a.z()}, 0xff4040ff); // X - add_debug_line(list, world2proj, m * Vector3f{a.x(),b.y(),a.z()}, m * Vector3f{b.x(),b.y(),a.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{a.x(),a.y(),b.z()}, m * Vector3f{b.x(),a.y(),b.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{a.x(),b.y(),b.z()}, m * Vector3f{b.x(),b.y(),b.z()}, 0xffffffff); - - add_debug_line(list, world2proj, m * Vector3f{a.x(),a.y(),a.z()}, m * Vector3f{a.x(),b.y(),a.z()}, 0xff40ff40); // Y - add_debug_line(list, world2proj, m * Vector3f{b.x(),a.y(),a.z()}, m * Vector3f{b.x(),b.y(),a.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{a.x(),a.y(),b.z()}, m * Vector3f{a.x(),b.y(),b.z()}, 0xffffffff); - add_debug_line(list, world2proj, m * Vector3f{b.x(),a.y(),b.z()}, m * Vector3f{b.x(),b.y(),b.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{a.x(), a.y(), a.z()}, m * Vector3f{a.x(), a.y(), b.z()}, 0xffff4040); // Z + add_debug_line(list, world2proj, m * Vector3f{b.x(), a.y(), a.z()}, m * Vector3f{b.x(), a.y(), b.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{a.x(), b.y(), a.z()}, m * Vector3f{a.x(), b.y(), b.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{b.x(), b.y(), a.z()}, m * Vector3f{b.x(), b.y(), b.z()}, 0xffffffff); + + add_debug_line(list, world2proj, m * Vector3f{a.x(), a.y(), a.z()}, m * Vector3f{b.x(), a.y(), a.z()}, 0xff4040ff); // X + add_debug_line(list, world2proj, m * Vector3f{a.x(), b.y(), a.z()}, m * Vector3f{b.x(), b.y(), a.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{a.x(), a.y(), b.z()}, m * Vector3f{b.x(), a.y(), b.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{a.x(), b.y(), b.z()}, m * Vector3f{b.x(), b.y(), b.z()}, 0xffffffff); + + add_debug_line(list, world2proj, m * Vector3f{a.x(), a.y(), a.z()}, m * Vector3f{a.x(), b.y(), a.z()}, 0xff40ff40); // Y + add_debug_line(list, world2proj, m * Vector3f{b.x(), a.y(), a.z()}, m * Vector3f{b.x(), b.y(), 
a.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{a.x(), a.y(), b.z()}, m * Vector3f{a.x(), b.y(), b.z()}, 0xffffffff); + add_debug_line(list, world2proj, m * Vector3f{b.x(), a.y(), b.z()}, m * Vector3f{b.x(), b.y(), b.z()}, 0xffffffff); } void visualize_nerf_camera(ImDrawList* list, const Matrix<float, 4, 4>& world2proj, const Eigen::Matrix<float, 3, 4>& xform, float aspect, uint32_t col, float thickness) { const float axis_size = 0.025f; - const Vector3f *xforms = (const Vector3f*)&xform; + const Vector3f* xforms = (const Vector3f*)&xform; Vector3f pos = xforms[3]; add_debug_line(list, world2proj, pos, pos+axis_size*xforms[0], 0xff4040ff, thickness); add_debug_line(list, world2proj, pos, pos+axis_size*xforms[1], 0xff40ff40, thickness); add_debug_line(list, world2proj, pos, pos+axis_size*xforms[2], 0xffff4040, thickness); - float xs=axis_size*aspect; - float ys=axis_size; - float zs=axis_size*2.f*aspect; - Vector3f a=pos+xs*xforms[0]+ys*xforms[1]+zs*xforms[2]; - Vector3f b=pos-xs*xforms[0]+ys*xforms[1]+zs*xforms[2]; - Vector3f c=pos-xs*xforms[0]-ys*xforms[1]+zs*xforms[2]; - Vector3f d=pos+xs*xforms[0]-ys*xforms[1]+zs*xforms[2]; + float xs = axis_size * aspect; + float ys = axis_size; + float zs = axis_size * 2.0f * aspect; + Vector3f a = pos + xs * xforms[0] + ys * xforms[1] + zs * xforms[2]; + Vector3f b = pos - xs * xforms[0] + ys * xforms[1] + zs * xforms[2]; + Vector3f c = pos - xs * xforms[0] - ys * xforms[1] + zs * xforms[2]; + Vector3f d = pos + xs * xforms[0] - ys * xforms[1] + zs * xforms[2]; add_debug_line(list, world2proj, pos, a, col, thickness); add_debug_line(list, world2proj, pos, b, col, thickness); add_debug_line(list, world2proj, pos, c, col, thickness); @@ -293,57 +319,57 @@ void visualize_nerf_camera(ImDrawList* list, const Matrix<float, 4, 4>& world2pr } bool CameraPath::imgui_viz(ImDrawList* list, Matrix<float, 4, 4> &view2proj, Matrix<float, 4, 4> &world2proj, Matrix<float, 4, 4> &world2view, Vector2f focal, float aspect) { - bool changed=false; + bool changed = false; float flx = focal.x(); float fly = focal.y(); Matrix<float, 4, 4> view2proj_guizmo; float zfar = 100.f; float znear = 0.1f; view2proj_guizmo << - fly*2.f/aspect, 0, 0, 0, - 0, -fly*2.f, 0, 0, - 0, 0, (zfar+znear)/(zfar-znear), -(2.f*zfar*znear) / (zfar-znear), + fly * 2.0f / aspect, 0, 0, 0, + 0, -fly * 2.0f, 0, 0, + 0, 0, (zfar + znear) / (zfar - znear), -(2.0f * zfar * znear) / (zfar - znear), 0, 0, 1, 0; - if (!m_update_cam_from_path) { + if (!update_cam_from_path) { ImDrawList* list = ImGui::GetForegroundDrawList(); - int cur_cam_i=(int)round(m_playtime * (float)(m_keyframes.size()-1)); + int cur_cam_i=(int)round(play_time * (float)(keyframes.size()-1)); Eigen::Vector3f prevp; - for (int i=0;i<m_keyframes.size();++i) { - visualize_nerf_camera(list, world2proj, m_keyframes[i].m(), aspect, (i==cur_cam_i) ? 0xff80c0ff : 0x8080c0ff); - Eigen::Vector3f p=m_keyframes[i].T; + for (int i = 0; i < keyframes.size(); ++i) { + visualize_nerf_camera(list, world2proj, keyframes[i].m(), aspect, (i==cur_cam_i) ? 
0xff80c0ff : 0x8080c0ff); + Eigen::Vector3f p = keyframes[i].T; if (i) { add_debug_line(list, world2proj, prevp, p, 0xccffc040); } - prevp=p; + prevp = p; } - if (!m_keyframes.empty()) { + if (!keyframes.empty()) { ImGuiIO& io = ImGui::GetIO(); - Eigen::Matrix4f matrix=Eigen::Matrix4f::Identity(); - matrix.block<3,4>(0,0) = m_keyframes[cur_cam_i].m(); + Eigen::Matrix4f matrix = Eigen::Matrix4f::Identity(); + matrix.block<3,4>(0, 0) = keyframes[cur_cam_i].m(); ImGuizmo::SetRect(0, 0, io.DisplaySize.x, io.DisplaySize.y); if (ImGuizmo::Manipulate((const float*)&world2view, (const float*)&view2proj_guizmo, (ImGuizmo::OPERATION)m_gizmo_op, (ImGuizmo::MODE)m_gizmo_mode, (float*)&matrix, NULL, NULL)) { - int i0=cur_cam_i; while (i0>0 && m_keyframes[cur_cam_i].SamePosAs(m_keyframes[i0-1])) i0--; - int i1=cur_cam_i; while (i1<m_keyframes.size()-1 && m_keyframes[cur_cam_i].SamePosAs(m_keyframes[i1+1])) i1++; - for (int i=i0;i<=i1;++i) { - m_keyframes[i].T=matrix.block<3,4>(0,0).col(3); - m_keyframes[i].R=Eigen::Quaternionf(matrix.block<3,3>(0,0)).coeffs(); + int i0 = cur_cam_i; while (i0 > 0 && keyframes[cur_cam_i].SamePosAs(keyframes[i0 - 1])) i0--; + int i1 = cur_cam_i; while (i1 < keyframes.size() - 1 && keyframes[cur_cam_i].SamePosAs(keyframes[i1 + 1])) i1++; + for (int i = i0; i <= i1; ++i) { + keyframes[i].T = matrix.block<3, 4>(0, 0).col(3); + keyframes[i].R = Eigen::Quaternionf(matrix.block<3, 3>(0, 0)).coeffs(); } changed=true; } - visualize_nerf_camera(list, world2proj, eval_camera_path(m_playtime).m(), aspect, 0xff80ff80); - float dt = 0.05f / (float)m_keyframes.size(); + visualize_nerf_camera(list, world2proj, eval_camera_path(play_time).m(), aspect, 0xff80ff80); + float dt = 0.05f / (float)keyframes.size(); Eigen::Vector3f prevp; - for (float t=0.f;;t+=dt) { - if (t>1.f) t=1.f; - Eigen::Vector3f p=eval_camera_path(t).T; + for (float t = 0.0f;; t += dt) { + if (t > 1.0f) t = 1.0f; + Eigen::Vector3f p = eval_camera_path(t).T; if (t) { // draw a line - add_debug_line(list, world2proj, (prevp+p)*0.5f, p, 0xff80c0ff); + add_debug_line(list, world2proj, (prevp+p) * 0.5f, p, 0xff80c0ff); } - prevp=p; - if (t>=1.f) break; + prevp = p; + if (t >= 1.0f) break; } } } diff --git a/src/marching_cubes.cu b/src/marching_cubes.cu index d794d63548651c2cbad5d7dfb4fe711a7aa09ee8..7bc2d28c3e201a5e131737c38c8a24cd663c62b0 100644 --- a/src/marching_cubes.cu +++ b/src/marching_cubes.cu @@ -866,7 +866,7 @@ void save_mesh( free(tex); } - FILE* f = fopen(outputname,"wb"); + FILE* f = fopen(outputname, "wb"); if (!f) { throw std::runtime_error{"Failed to open " + std::string(outputname) + " for writing."}; } @@ -893,30 +893,35 @@ void save_mesh( , (unsigned int)cpuverts.size() , (unsigned int)cpuindices.size()/3 ); + for (size_t i=0;i<cpuverts.size();++i) { Vector3f p=(cpuverts[i]-nerf_offset)/nerf_scale; Vector3f c=cpucolors[i]; Vector3f n=cpunormals[i].normalized(); unsigned char c8[3]={(unsigned char)tcnn::clamp(c.x()*255.f,0.f,255.f),(unsigned char)tcnn::clamp(c.y()*255.f,0.f,255.f),(unsigned char)tcnn::clamp(c.z()*255.f,0.f,255.f)}; - fprintf(f,"%0.5f %0.5f %0.5f %0.3f %0.3f %0.3f %d %d %d\n", p.x(), p.y(), p.z(), n.x(), n.y(), n.z(), c8[0], c8[1], c8[2]); + fprintf(f, "%0.5f %0.5f %0.5f %0.3f %0.3f %0.3f %d %d %d\n", p.x(), p.y(), p.z(), n.x(), n.y(), n.z(), c8[0], c8[1], c8[2]); } + for (size_t i=0;i<cpuindices.size();i+=3) { - fprintf(f,"3 %d %d %d\n", cpuindices[i+2], cpuindices[i+1], cpuindices[i+0]); + fprintf(f, "3 %d %d %d\n", cpuindices[i+2], cpuindices[i+1], cpuindices[i+0]); } } else { // obj file 
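+		// OBJ branch: vertices are written as "v x y z r g b" (color appended to the position), then normals, optional UVs when unwrapping, then faces.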
if (unwrap_it) { fprintf(f, "mtllib nerf.mtl\n"); } + for (size_t i = 0; i < cpuverts.size(); ++i) { Vector3f p = (cpuverts[i]-nerf_offset)/nerf_scale; Vector3f c = cpucolors[i]; - fprintf(f,"v %0.5f %0.5f %0.5f %0.3f %0.3f %0.3f\n", p.x(), p.y(), p.z(), tcnn::clamp(c.x(), 0.f, 1.f), tcnn::clamp(c.y(), 0.f, 1.f), tcnn::clamp(c.z(), 0.f, 1.f)); + fprintf(f, "v %0.5f %0.5f %0.5f %0.3f %0.3f %0.3f\n", p.x(), p.y(), p.z(), tcnn::clamp(c.x(), 0.f, 1.f), tcnn::clamp(c.y(), 0.f, 1.f), tcnn::clamp(c.z(), 0.f, 1.f)); } + for (auto &v: cpunormals) { auto n = v.normalized(); - fprintf(f,"vn %0.5f %0.5f %0.5f\n", n.x(), n.y(), n.z()); + fprintf(f, "vn %0.5f %0.5f %0.5f\n", n.x(), n.y(), n.z()); } + if (unwrap_it) { for (size_t i = 0; i < cpuindices.size(); i++) { uint32_t q = (uint32_t)(i/6); @@ -931,8 +936,9 @@ void save_mesh( case 4: x += 3+d; break; case 5: x += 3+d; y += d; break; } - fprintf(f,"vt %0.5f %0.5f\n", ((float)x+0.5f)/float(texw), 1.f-((float)y+0.5f)/float(texh)); + fprintf(f, "vt %0.5f %0.5f\n", ((float)x+0.5f)/float(texw), 1.f-((float)y+0.5f)/float(texh)); } + fprintf(f, "g default\nusemtl nerf\ns 1\n"); for (size_t i = 0; i < cpuindices.size(); i += 3) { fprintf(f,"f %u/%u/%u %u/%u/%u %u/%u/%u\n", @@ -943,7 +949,7 @@ void save_mesh( } } else { for (size_t i = 0; i < cpuindices.size(); i += 3) { - fprintf(f,"f %u//%u %u//%u %u//%u\n", + fprintf(f, "f %u//%u %u//%u %u//%u\n", cpuindices[i+2]+1, cpuindices[i+2]+1, cpuindices[i+1]+1, cpuindices[i+1]+1, cpuindices[i+0]+1, cpuindices[i+0]+1 ); } diff --git a/src/python_api.cu b/src/python_api.cu index bd95aa2ccf0c0f2a429ca18d5a037e025f3ef328..f0e086ac553521447fb36bce22deadfc06e5439f 100644 --- a/src/python_api.cu +++ b/src/python_api.cu @@ -132,6 +132,7 @@ py::array_t<float> Testbed::render_to_cpu(int width, int height, int spp, bool l if (end_time < 0.f) { end_time = start_time; } + bool path_animation_enabled = start_time >= 0.f; if (!path_animation_enabled) { // the old code disabled camera smoothing for non-path renders; so we preserve that behaviour m_smoothed_camera = m_camera; @@ -146,6 +147,7 @@ py::array_t<float> Testbed::render_to_cpu(int width, int height, int spp, bool l set_camera_from_time(start_time); m_smoothed_camera = m_camera; } + auto start_cam_matrix = m_smoothed_camera; // now set up the end-of-frame camera matrix if we are moving along a path @@ -153,14 +155,15 @@ py::array_t<float> Testbed::render_to_cpu(int width, int height, int spp, bool l set_camera_from_time(end_time); apply_camera_smoothing(1000.f / fps); } + auto end_cam_matrix = m_smoothed_camera; for (int i = 0; i < spp; ++i) { float start_alpha = ((float)i)/(float)spp * shutter_fraction; float end_alpha = ((float)i + 1.0f)/(float)spp * shutter_fraction; - auto sample_start_cam_matrix = log_space_lerp(start_cam_matrix, end_cam_matrix, start_alpha); - auto sample_end_cam_matrix = log_space_lerp(start_cam_matrix, end_cam_matrix, end_alpha); + auto sample_start_cam_matrix = start_cam_matrix; + auto sample_end_cam_matrix = log_space_lerp(start_cam_matrix, end_cam_matrix, shutter_fraction); if (path_animation_enabled) { set_camera_from_time(start_time + (end_time-start_time) * (start_alpha + end_alpha) / 2.0f); @@ -184,21 +187,6 @@ py::array_t<float> Testbed::render_to_cpu(int width, int height, int spp, bool l return result; } -py::array_t<float> Testbed::render_with_rolling_shutter_to_cpu(const Eigen::Matrix<float, 3, 4>& camera_transform_start, const Eigen::Matrix<float, 3, 4>& camera_transform_end, const Eigen::Vector4f& rolling_shutter, int width, int 
height, int spp, bool linear) { - m_windowless_render_surface.resize({width, height}); - m_windowless_render_surface.reset_accumulation(); - for (int i = 0; i < spp; ++i) { - if (m_autofocus) { - autofocus(); - } - render_frame(m_nerf.training.dataset.nerf_matrix_to_ngp(camera_transform_start), m_nerf.training.dataset.nerf_matrix_to_ngp(camera_transform_end), rolling_shutter, m_windowless_render_surface, !linear); - } - py::array_t<float> result({height, width, 4}); - py::buffer_info buf = result.request(); - CUDA_CHECK_THROW(cudaMemcpy2DFromArray(buf.ptr, width * sizeof(float) * 4, m_windowless_render_surface.surface_provider().array(), 0, 0, width * sizeof(float) * 4, height, cudaMemcpyDeviceToHost)); - return result; -} - #ifdef NGP_GUI py::array_t<float> Testbed::screenshot(bool linear) const { std::vector<float> tmp(m_window_res.prod() * 4); @@ -376,15 +364,6 @@ PYBIND11_MODULE(pyngp, m) { py::arg("fps") = 30.f, py::arg("shutter_fraction") = 1.0f ) - .def("render_with_rolling_shutter", &Testbed::render_with_rolling_shutter_to_cpu, "Renders an image at the requested resolution. Does not require a window. Supports rolling shutter, with per ray time being computed as A+B*u+C*v+D*t for [A,B,C,D]", - py::arg("transform_matrix_start"), - py::arg("transform_matrix_end"), - py::arg("rolling_shutter") = Eigen::Vector4f::Zero(), - py::arg("width") = 1920, - py::arg("height") = 1080, - py::arg("spp") = 1, - py::arg("linear") = true - ) .def("destroy_window", &Testbed::destroy_window, "Destroy the window again.") .def("train", &Testbed::train, py::call_guard<py::gil_scoped_release>(), "Perform a specified number of training steps.") .def("reset", &Testbed::reset_network, py::arg("reset_density_grid") = true, "Reset training.") diff --git a/src/testbed.cu b/src/testbed.cu index 5a54c5385c0d1c2b9eb5d38c44bb2bbfab47be1e..e3b03cbc1e0f313a83117bab6ce8616d286e59ab 100644 --- a/src/testbed.cu +++ b/src/testbed.cu @@ -38,22 +38,27 @@ #include <filesystem/directory.h> #include <filesystem/path.h> +#include <stb_image/stb_image.h> +#include <stb_image/stb_image_write.h> + #include <fstream> #include <set> +#include <unordered_set> #ifdef NGP_GUI # include <imgui/imgui.h> # include <imgui/backends/imgui_impl_glfw.h> # include <imgui/backends/imgui_impl_opengl3.h> # include <imguizmo/ImGuizmo.h> -# include <stb_image/stb_image.h> # ifdef _WIN32 # include <GL/gl3w.h> # else # include <GL/glew.h> # endif # include <GLFW/glfw3.h> +# include <GLFW/glfw3native.h> # include <cuda_gl_interop.h> + #endif // Windows.h is evil @@ -68,6 +73,15 @@ using namespace std::literals::chrono_literals; using namespace tcnn; namespace fs = filesystem; +int do_system(const std::string& cmd) { +#ifdef _WIN32 + tlog::info() << "> " << cmd; +#else + tlog::info() << "$ " << cmd; +#endif + return system(cmd.c_str()); +} + NGP_NAMESPACE_BEGIN std::atomic<size_t> g_total_n_bytes_allocated{0}; @@ -284,7 +298,15 @@ void Testbed::load_file(const std::string& file_path) { // If the dragged file isn't any of the above, assume that it's training data try { + bool was_training_data_available = m_training_data_available; load_training_data(file_path); + + if (!was_training_data_available) { + // If we previously didn't have any training data and only now dragged + // some into the window, it is very unlikely that the user doesn't + // want to immediately start training on that data. So: go for it. 
+ m_train = true; + } } catch (std::runtime_error& e) { tlog::error() << "Failed to load training data: " << e.what(); } @@ -388,9 +410,11 @@ void Testbed::reset_camera() { 0.0f, -1.0f, 0.0f, 0.5f, 0.0f, 0.0f, -1.0f, 0.5f; m_camera.col(3) -= m_scale * view_dir(); + m_smoothed_camera = m_camera; m_up_dir = {0.0f, 1.0f, 0.0f}; m_sun_dir = Vector3f::Ones().normalized(); + reset_accumulation(); } @@ -557,32 +581,124 @@ bool imgui_colored_button(const char *name, float hue) { } void Testbed::imgui() { + // If a GUI interaction causes an error, write that error to the following string and call + // ImGui::OpenPopup("Error"); + static std::string imgui_error_string = ""; + m_picture_in_picture_res = 0; - if (int read = ImGui::Begin("Camera path", 0, ImGuiWindowFlags_NoScrollbar)) { - static char path_filename_buf[128] = ""; - if (path_filename_buf[0] == '\0') { - snprintf(path_filename_buf, sizeof(path_filename_buf), "%s", get_filename_in_data_path_with_suffix(m_data_path, m_network_config_path, "_cam.json").c_str()); + if (ImGui::Begin("Camera path", 0, ImGuiWindowFlags_NoScrollbar)) { + if (ImGui::CollapsingHeader("Path manipulation", ImGuiTreeNodeFlags_DefaultOpen)) { + static char path_filename_buf[128] = ""; + if (path_filename_buf[0] == '\0') { + snprintf(path_filename_buf, sizeof(path_filename_buf), "%s", get_filename_in_data_path_with_suffix(m_data_path, m_network_config_path, "_cam.json").c_str()); + } + + if (int read = m_camera_path.imgui( + path_filename_buf, + m_render_ms.val(), + m_camera, + m_slice_plane_z, + m_scale, + fov(), + m_aperture_size, + m_bounding_radius, + !m_nerf.training.dataset.xforms.empty() ? m_nerf.training.dataset.xforms[0].start : Matrix<float, 3, 4>::Identity(), + m_nerf.glow_mode, + m_nerf.glow_y_cutoff + )) { + if (!m_camera_path.rendering) { + reset_accumulation(true); + + if (m_camera_path.update_cam_from_path) { + set_camera_from_time(m_camera_path.play_time); + + // A value of larger than 1 indicates that the camera path wants + // to override camera smoothing. + if (read > 1) { + m_smoothed_camera = m_camera; + } + } else { + m_pip_render_surface->reset_accumulation(); + } + } + } + + if (!m_camera_path.keyframes.empty()) { + float w = ImGui::GetContentRegionAvail().x; + if (m_camera_path.update_cam_from_path) { + m_picture_in_picture_res = 0; + ImGui::Image((ImTextureID)(size_t)m_render_textures.front()->texture(), ImVec2(w, w * 9.0f / 16.0f)); + } else { + m_picture_in_picture_res = (float)std::min((int(w)+31)&(~31), 1920/4); + ImGui::Image((ImTextureID)(size_t)m_pip_render_texture->texture(), ImVec2(w, w * 9.0f / 16.0f)); + } + } } - if (m_camera_path.imgui(path_filename_buf, m_render_ms.val(), m_camera, m_slice_plane_z, m_scale, fov(), m_aperture_size, m_bounding_radius, !m_nerf.training.dataset.xforms.empty() ? m_nerf.training.dataset.xforms[0].start : Matrix<float, 3, 4>::Identity(), m_nerf.glow_mode, m_nerf.glow_y_cutoff)) { - if (m_camera_path.m_update_cam_from_path) { - set_camera_from_time(m_camera_path.m_playtime); - if (read > 1) { + if (!m_camera_path.keyframes.empty() && ImGui::CollapsingHeader("Export video", ImGuiTreeNodeFlags_DefaultOpen)) { + // Render a video + if (imgui_colored_button(m_camera_path.rendering ? 
"Abort rendering" : "Render video", 0.4)) { + m_camera_path.rendering = !m_camera_path.rendering; + + if (!clear_tmp_dir()) { + imgui_error_string = "Failed to clear temporary directory 'tmp' to hold rendered images."; + ImGui::OpenPopup("Error"); + + m_camera_path.rendering = false; + } + + if (m_camera_path.rendering) { + m_camera_path.render_start_time = std::chrono::steady_clock::now(); + m_camera_path.update_cam_from_path = true; + m_camera_path.play_time = 0.0f; + m_camera_path.auto_play_speed = 1.0f; + m_camera_path.render_frame_idx = 0; + + m_dlss = false; + m_train = false; + + reset_accumulation(true); + set_camera_from_time(m_camera_path.play_time); m_smoothed_camera = m_camera; + } else { + m_camera_path.play_time = 0.0f; + m_camera_path.auto_play_speed = 0.0f; } } - m_pip_render_surface->reset_accumulation(); - reset_accumulation(true); - } - if (!m_camera_path.m_keyframes.empty()) { - float w = ImGui::GetContentRegionAvail().x; - m_picture_in_picture_res = (float)std::min((int(w)+31)&(~31),1920/4); - if (m_camera_path.m_update_cam_from_path) { - ImGui::Image((ImTextureID)(size_t)m_render_textures.front()->texture(), ImVec2(w,w*9.f/16.f)); - } else { - ImGui::Image((ImTextureID)(size_t)m_pip_render_texture->texture(), ImVec2(w,w*9.f/16.f)); + if (m_camera_path.rendering) { + ImGui::SameLine(); + + auto elapsed = std::chrono::steady_clock::now() - m_camera_path.render_start_time; + + uint32_t progress = m_camera_path.render_frame_idx * m_camera_path.render_settings.spp + m_render_surfaces.front().spp(); + uint32_t goal = m_camera_path.render_settings.n_frames() * m_camera_path.render_settings.spp; + auto est_remaining = elapsed * (float)(goal - progress) / std::max(progress, 1u); + + ImGui::Text("%s", fmt::format( + "Frame {}/{}, Elapsed: {}, Remaining: {}", + m_camera_path.render_frame_idx+1, + m_camera_path.render_settings.n_frames(), + tlog::durationToString(std::chrono::steady_clock::now() - m_camera_path.render_start_time), + tlog::durationToString(est_remaining) + ).c_str()); } + + if (m_camera_path.rendering) { ImGui::BeginDisabled(); } + + static char video_filename_buf[1024] = "video.mp4"; + ImGui::InputText("File##Video file path", video_filename_buf, sizeof(video_filename_buf)); + m_camera_path.render_settings.filename = video_filename_buf; + + ImGui::InputInt2("Resolution", &m_camera_path.render_settings.resolution.x()); + ImGui::InputFloat("Duration (seconds)", &m_camera_path.render_settings.duration_seconds); + ImGui::InputFloat("FPS (frames/second)", &m_camera_path.render_settings.fps); + ImGui::InputInt("SPP (samples/pixel)", &m_camera_path.render_settings.spp); + ImGui::SliderInt("Quality", &m_camera_path.render_settings.quality, 0, 10); + + ImGui::SliderFloat("Shutter fraction", &m_camera_path.render_settings.shutter_fraction, 0.0f, 1.0f); + + if (m_camera_path.rendering) { ImGui::EndDisabled(); } } } ImGui::End(); @@ -709,7 +825,7 @@ void Testbed::imgui() { ImGui::TreePop(); } - if (m_testbed_mode == ETestbedMode::Volume && ImGui::CollapsingHeader("Volume training options")) { + if (m_testbed_mode == ETestbedMode::Volume && ImGui::TreeNode("Volume training options")) { accum_reset |= ImGui::SliderFloat("Albedo", &m_volume.albedo, 0.f, 1.f); accum_reset |= ImGui::SliderFloat("Scattering", &m_volume.scattering, -2.f, 2.f); accum_reset |= ImGui::SliderFloat("Distance scale", &m_volume.inv_distance_scale, 1.f, 100.f, "%.3g", ImGuiSliderFlags_Logarithmic | ImGuiSliderFlags_NoRoundToFormat); @@ -896,11 +1012,13 @@ void Testbed::imgui() { "Sphere Traced Mesh\0" "SDF 
Bricks\0" ); + if (m_sdf.groundtruth_mode == ESDFGroundTruthMode::SDFBricks) { - accum_reset |= ImGui::SliderInt("Brick Octree Level", (int*)&m_sdf.brick_level, 1, 10); - accum_reset |= ImGui::Checkbox("Brick Normals track Octree Level", &m_sdf.brick_smooth_normals); - accum_reset |= ImGui::SliderInt("Brick Quantize Bits", (int*)&m_sdf.brick_quantise_bits, 0, 16); + accum_reset |= ImGui::SliderInt("Brick octree Level", (int*)&m_sdf.brick_level, 1, 10); + accum_reset |= ImGui::Checkbox("Brick normals track octree Level", &m_sdf.brick_smooth_normals); + accum_reset |= ImGui::SliderInt("Brick quantize Bits", (int*)&m_sdf.brick_quantise_bits, 0, 16); } + accum_reset |= ImGui::Checkbox("Analytic normals", &m_sdf.analytic_normals); accum_reset |= ImGui::SliderFloat("Normals epsilon", &m_sdf.fd_normals_epsilon, 0.00001f, 0.1f, "%.6g", ImGuiSliderFlags_Logarithmic); @@ -987,11 +1105,11 @@ void Testbed::imgui() { ImGui::PlotLines("Training view exposures", exposures.data(), exposures.size(), 0, nullptr, FLT_MAX, FLT_MAX, ImVec2(0, 60.f)); } - if (ImGui::SliderInt("glow mode", &m_nerf.glow_mode, 0, 16)) { + if (ImGui::SliderInt("Glow mode", &m_nerf.glow_mode, 0, 16)) { accum_reset = true; } - if (m_nerf.glow_mode && ImGui::SliderFloat("glow pos", &m_nerf.glow_y_cutoff, -2.f, 3.f)) { + if (m_nerf.glow_mode && ImGui::SliderFloat("Glow height", &m_nerf.glow_y_cutoff, -2.f, 3.f)) { accum_reset = true; } } @@ -1017,9 +1135,7 @@ void Testbed::imgui() { } ImGui::Checkbox("First person controls", &m_fps_camera); - ImGui::SameLine(); ImGui::Checkbox("Smooth camera motion", &m_camera_smoothing); - ImGui::SameLine(); ImGui::Checkbox("Autofocus", &m_autofocus); if (ImGui::TreeNode("Advanced camera settings")) { @@ -1088,8 +1204,8 @@ void Testbed::imgui() { ImGui::InputTextMultiline("Params", buf, sizeof(buf)); ImGui::TreePop(); } - } + if (ImGui::CollapsingHeader("Snapshot")) { static char snapshot_filename_buf[128] = ""; if (snapshot_filename_buf[0] == '\0') { @@ -1102,33 +1218,26 @@ void Testbed::imgui() { save_snapshot(snapshot_filename_buf, m_include_optimizer_state_in_snapshot); } ImGui::SameLine(); - static std::string snapshot_load_error_string = ""; if (ImGui::Button("Load")) { try { load_snapshot(snapshot_filename_buf); } catch (std::exception& e) { - ImGui::OpenPopup("Snapshot load error"); - snapshot_load_error_string = std::string{"Failed to load snapshot: "} + e.what(); + imgui_error_string = fmt::format("Failed to load snapshot: {}", e.what()); + ImGui::OpenPopup("Error"); } } ImGui::SameLine(); if (ImGui::Button("Dump parameters as images")) { dump_parameters_as_images(m_trainer->params(), "params"); } - if (ImGui::BeginPopupModal("Snapshot load error", NULL, ImGuiWindowFlags_AlwaysAutoResize)) { - ImGui::Text("%s", snapshot_load_error_string.c_str()); - if (ImGui::Button("OK", ImVec2(120, 0))) { - ImGui::CloseCurrentPopup(); - } - ImGui::EndPopup(); - } + ImGui::SameLine(); - ImGui::Checkbox("w/ Optimizer State", &m_include_optimizer_state_in_snapshot); + ImGui::Checkbox("w/ optimizer state", &m_include_optimizer_state_in_snapshot); ImGui::InputText("File##Snapshot file path", snapshot_filename_buf, sizeof(snapshot_filename_buf)); } if (m_testbed_mode == ETestbedMode::Nerf || m_testbed_mode == ETestbedMode::Sdf) { - if (ImGui::CollapsingHeader("Marching Cubes Mesh Output")) { + if (ImGui::CollapsingHeader("Export mesh / volume / slices")) { static bool flip_y_and_z_axes = false; static float density_range = 4.f; BoundingBox aabb = (m_testbed_mode == ETestbedMode::Nerf) ? 
m_render_aabb : m_aabb; @@ -1154,7 +1263,7 @@ void Testbed::imgui() { ImGui::SameLine(); - if (imgui_colored_button("Save density PNG",-0.4f)) { + if (imgui_colored_button("Save density PNG", -0.7f)) { Testbed::compute_and_save_png_slices(m_data_path.str().c_str(), m_mesh.res, {}, m_mesh.thresh, density_range, flip_y_and_z_axes); } @@ -1239,7 +1348,7 @@ void Testbed::imgui() { m_sdf.brdf.ambientcolor = (m_background_color * m_background_color).head<3>(); } - if (ImGui::CollapsingHeader("Histograms of trainable encoding parameters")) { + if (ImGui::CollapsingHeader("Histograms of encoding parameters")) { ImGui::Checkbox("Gather histograms", &m_gather_histograms); static float minlevel = 0.f; @@ -1282,6 +1391,14 @@ void Testbed::imgui() { } } + if (ImGui::BeginPopupModal("Error", NULL, ImGuiWindowFlags_AlwaysAutoResize)) { + ImGui::Text("%s", imgui_error_string.c_str()); + if (ImGui::Button("OK", ImVec2(120, 0))) { + ImGui::CloseCurrentPopup(); + } + ImGui::EndPopup(); + } + if (accum_reset) { reset_accumulation(); } @@ -1781,6 +1898,152 @@ void Testbed::draw_gui() { } #endif //NGP_GUI +__global__ void to_8bit_color_kernel( + Vector2i resolution, + EColorSpace output_color_space, + cudaSurfaceObject_t surface, + uint8_t* result +) { + uint32_t x = threadIdx.x + blockDim.x * blockIdx.x; + uint32_t y = threadIdx.y + blockDim.y * blockIdx.y; + + if (x >= resolution.x() || y >= resolution.y()) { + return; + } + + Array4f color; + surf2Dread((float4*)&color, surface, x * sizeof(float4), y); + + if (output_color_space == EColorSpace::Linear) { + color.head<3>() = linear_to_srgb(color.head<3>()); + } + + for (uint32_t i = 0; i < 3; ++i) { + result[(x + resolution.x() * y) * 3 + i] = (uint8_t)(tcnn::clamp(color[i], 0.0f, 1.0f) * 255.0f + 0.5f); + } +} + +void Testbed::prepare_next_camera_path_frame() { + if (!m_camera_path.rendering) { + return; + } + + // If we're rendering a video, we'd like to accumulate multiple spp + // for motion blur. Hence dump the frame once the target spp has been reached + // and only reset _then_. 
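+	// Each finished frame is converted to 8-bit sRGB on the GPU and handed to m_thread_pool,
+	// which copies it to the CPU and writes it as a JPEG into the temporary "tmp" directory.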
+ if (m_render_surfaces.front().spp() == m_camera_path.render_settings.spp) { + auto tmp_dir = fs::path{"tmp"}; + if (!tmp_dir.exists()) { + if (!fs::create_directory(tmp_dir)) { + m_camera_path.rendering = false; + tlog::error() << "Failed to create temporary directory 'tmp' to hold rendered images."; + return; + } + } + + Vector2i res = m_render_surfaces.front().out_resolution(); + const dim3 threads = { 16, 8, 1 }; + const dim3 blocks = { div_round_up((uint32_t)res.x(), threads.x), div_round_up((uint32_t)res.y(), threads.y), 1 }; + + GPUMemory<uint8_t> image_data(res.prod() * 3); + to_8bit_color_kernel<<<blocks, threads>>>( + res, + EColorSpace::SRGB, // the GUI always renders in SRGB + m_render_surfaces.front().surface(), + image_data.data() + ); + + m_render_futures.emplace_back(m_thread_pool.enqueue_task([image_data=std::move(image_data), frame_idx=m_camera_path.render_frame_idx++, res, tmp_dir] { + std::vector<uint8_t> cpu_image_data(image_data.size()); + CUDA_CHECK_THROW(cudaMemcpy(cpu_image_data.data(), image_data.data(), image_data.bytes(), cudaMemcpyDeviceToHost)); + stbi_write_jpg(fmt::format("{}/{:06d}.jpg", tmp_dir.str(), frame_idx).c_str(), res.x(), res.y(), 3, cpu_image_data.data(), 100); + })); + + reset_accumulation(true); + + if (m_camera_path.render_frame_idx == m_camera_path.render_settings.n_frames()) { + m_camera_path.rendering = false; + + wait_all(m_render_futures); + m_render_futures.clear(); + + tlog::success() << "Finished rendering '.jpg' video frames to '" << tmp_dir << "'. Assembling them into a video next."; + + fs::path ffmpeg = "ffmpeg"; + +#ifdef _WIN32 + // Under Windows, try automatically downloading FFmpeg binaries if they don't exist + { + // if (system(fmt::format("where {} >nul 2>nul", ffmpeg.str()).c_str()) != 0) { + fs::path root_dir = fs::path{"scripts"}.exists() ? "." : ".."; + if ((root_dir/"external"/"ffmpeg").exists()) { + for (const auto& path : fs::directory{root_dir/"external"/"ffmpeg"}) { + ffmpeg = path/"bin"/"ffmpeg.exe"; + } + } + + if (!ffmpeg.exists()) { + tlog::info() << "FFmpeg not found. Downloading FFmpeg..."; + do_system((root_dir/"scripts"/"download_ffmpeg.bat").str()); + } + + for (const auto& path : fs::directory{root_dir/"external"/"ffmpeg"}) { + ffmpeg = path/"bin"/"ffmpeg.exe"; + } + + if (!ffmpeg.exists()) { + tlog::warning() << "FFmpeg download failed. Trying system-wide FFmpeg."; + } + } +#endif + + auto ffmpeg_command = fmt::format( + "{} -loglevel error -y -framerate {} -i tmp/%06d.jpg -c:v libx264 -preset slow -crf {} -pix_fmt yuv420p {}", + ffmpeg.str(), + m_camera_path.render_settings.fps, + // Quality goes from 0 to 10. 
This conversion to CRF means a quality of 10 + // is a CRF of 17 and a quality of 0 a CRF of 27, which covers the "sane" + // range of x264 quality settings according to the FFmpeg docs: + // https://trac.ffmpeg.org/wiki/Encode/H.264 + 27 - m_camera_path.render_settings.quality, + m_camera_path.render_settings.filename + ); + int ffmpeg_result = do_system(ffmpeg_command); + if (ffmpeg_result == 0) { + tlog::success() << "Saved video '" << m_camera_path.render_settings.filename << "'"; + } else if (ffmpeg_result == -1) { + tlog::error() << "Video could not be assembled: FFmpeg not found."; + } else { + tlog::error() << "Video could not be assembled: FFmpeg failed"; + } + + clear_tmp_dir(); + } + } + + const auto& rs = m_camera_path.render_settings; + m_camera_path.play_time = (float)((double)m_camera_path.render_frame_idx / (double)rs.n_frames()); + + if (m_render_surfaces.front().spp() == 0) { + set_camera_from_time(m_camera_path.play_time); + apply_camera_smoothing(rs.frame_milliseconds()); + + auto smoothed_camera_backup = m_smoothed_camera; + + // Compute the camera for the next frame in order to be able to compute motion blur + // between it and the current one. + set_camera_from_time(m_camera_path.play_time + 1.0f / rs.n_frames()); + apply_camera_smoothing(rs.frame_milliseconds()); + + m_camera_path.render_frame_end_camera = m_smoothed_camera; + + // Revert camera such that the next frame will be computed correctly + // (Start camera of next frame should be the same as end camera of this frame) + set_camera_from_time(m_camera_path.play_time); + m_smoothed_camera = smoothed_camera_backup; + } +} + void Testbed::train_and_render(bool skip_rendering) { if (m_train) { train(m_training_batch_size); @@ -1799,7 +2062,10 @@ void Testbed::train_and_render(bool skip_rendering) { optimise_mesh_step(1); } - apply_camera_smoothing(m_frame_ms.val()); + // Don't do any smoothing here if a camera path is being rendered. It'll take care + // of the smoothing on its own. + float frame_ms = m_camera_path.rendering ? 0.0f : m_frame_ms.val(); + apply_camera_smoothing(frame_ms); if (!m_render_window || !m_render || skip_rendering) { return; @@ -1812,7 +2078,7 @@ void Testbed::train_and_render(bool skip_rendering) { if ((m_smoothed_camera - m_camera).norm() < 0.001f) { m_smoothed_camera = m_camera; - } else { + } else if (!m_camera_path.rendering) { reset_accumulation(true); } @@ -1863,6 +2129,11 @@ void Testbed::train_and_render(bool skip_rendering) { m_last_render_res_factor = factor; } + if (m_camera_path.rendering) { + render_res = m_camera_path.render_settings.resolution; + m_last_render_res_factor = 1.0f; + } + if (render_buffer.dlss()) { render_res = render_buffer.dlss()->clamp_resolution(render_res); render_buffer.dlss()->update_feature(render_res, render_buffer.dlss()->is_hdr(), render_buffer.dlss()->sharpen()); @@ -1871,18 +2142,23 @@ void Testbed::train_and_render(bool skip_rendering) { render_buffer.resize(render_res); } - render_frame(m_smoothed_camera, m_smoothed_camera, Eigen::Vector4f::Zero(), render_buffer); + render_frame( + m_smoothed_camera, + m_camera_path.rendering ? 
log_space_lerp(m_smoothed_camera, m_camera_path.render_frame_end_camera, m_camera_path.render_settings.shutter_fraction) : m_smoothed_camera, + {0.0f, 0.0f, 0.0f, 1.0f}, + render_buffer + ); #ifdef NGP_GUI m_render_textures.front()->blit_from_cuda_mapping(); if (m_picture_in_picture_res > 0) { - Vector2i res(m_picture_in_picture_res, m_picture_in_picture_res*9/16); + Vector2i res(m_picture_in_picture_res, m_picture_in_picture_res * 9/16); m_pip_render_surface->resize(res); if (m_pip_render_surface->spp() < 8) { // a bit gross, but let's copy the keyframe's state into the global state in order to not have to plumb through the fov etc to render_frame. CameraKeyframe backup = copy_camera_to_keyframe(); - CameraKeyframe pip_kf = m_camera_path.eval_camera_path(m_camera_path.m_playtime); + CameraKeyframe pip_kf = m_camera_path.eval_camera_path(m_camera_path.play_time); set_camera_from_keyframe(pip_kf); render_frame(pip_kf.m(), pip_kf.m(), Eigen::Vector4f::Zero(), *m_pip_render_surface); set_camera_from_keyframe(backup); @@ -1927,7 +2203,9 @@ void Testbed::train_and_render(bool skip_rendering) { m_visualized_dimension = i-1; m_render_surfaces[i].resize(m_view_size); + render_frame(m_smoothed_camera, m_smoothed_camera, Eigen::Vector4f::Zero(), m_render_surfaces[i]); + m_render_textures[i]->blit_from_cuda_mapping(); ++i; } @@ -2202,6 +2480,11 @@ bool Testbed::frame() { } } + if (m_camera_path.rendering) { + prepare_next_camera_path_frame(); + skip_rendering = false; + } + if (!skip_rendering || (std::chrono::steady_clock::now() - m_last_gui_draw_time_point) > 25ms) { redraw_gui_next_frame(); } @@ -2273,7 +2556,7 @@ void Testbed::set_camera_from_keyframe(const CameraKeyframe& k) { } void Testbed::set_camera_from_time(float t) { - if (m_camera_path.m_keyframes.empty()) + if (m_camera_path.keyframes.empty()) return; set_camera_from_keyframe(m_camera_path.eval_camera_path(t)); } @@ -2638,7 +2921,7 @@ Testbed::Testbed(ETestbedMode mode) { int active_device = cuda_device(); int active_compute_capability = cuda_compute_capability(); - tlog::success() << "Active GPU is #" << active_device << ": " << cuda_device_name() << " [" << active_compute_capability << "]"; + tlog::success() << "Initialized CUDA. 
Active GPU is #" << active_device << ": " << cuda_device_name() << " [" << active_compute_capability << "]"; if (active_compute_capability < MIN_GPU_ARCH) { tlog::warning() << "Insufficient compute capability " << active_compute_capability << " detected."; @@ -2683,13 +2966,37 @@ Testbed::Testbed(ETestbedMode mode) { Testbed::~Testbed() { + // If any temporary file was created, make sure it's deleted + clear_tmp_dir(); + if (m_render_window) { destroy_window(); } } +bool Testbed::clear_tmp_dir() { + wait_all(m_render_futures); + m_render_futures.clear(); + + bool success = true; + auto tmp_dir = fs::path{"tmp"}; + if (tmp_dir.exists()) { + if (tmp_dir.is_directory()) { + for (const auto& path : fs::directory{tmp_dir}) { + if (path.is_file()) { + success &= path.remove_file(); + } + } + } + + success &= tmp_dir.remove_file(); + } + + return success; +} + void Testbed::train(uint32_t batch_size) { - if (!m_training_data_available) { + if (!m_training_data_available || m_camera_path.rendering) { m_train = false; return; } @@ -3203,7 +3510,7 @@ void Testbed::save_snapshot(const std::string& filepath_string, bool include_opt void Testbed::load_snapshot(const std::string& filepath_string) { auto config = load_network_config(filepath_string); if (!config.contains("snapshot")) { - throw std::runtime_error{fmt::format("File {} does not contain a snapshot.", filepath_string)}; + throw std::runtime_error{fmt::format("File '{}' does not contain a snapshot.", filepath_string)}; } m_network_config_path = filepath_string; @@ -3226,9 +3533,7 @@ void Testbed::load_snapshot(const std::string& filepath_string) { m_aabb = snapshot.value("aabb", m_aabb); m_bounding_radius = snapshot.value("bounding_radius", m_bounding_radius); - if (m_testbed_mode == ETestbedMode::Sdf) { - set_scale(m_bounding_radius * 1.5f); - } else if (m_testbed_mode == ETestbedMode::Nerf) { + if (m_testbed_mode == ETestbedMode::Nerf) { if (snapshot["density_grid_size"] != NERF_GRIDSIZE()) { throw std::runtime_error{"Incompatible grid size."}; } @@ -3285,11 +3590,11 @@ void Testbed::load_camera_path(const std::string& filepath_string) { } bool Testbed::loop_animation() { - return m_camera_path.m_loop; + return m_camera_path.loop; } void Testbed::set_loop_animation(bool value) { - m_camera_path.m_loop = value; + m_camera_path.loop = value; } NGP_NAMESPACE_END