From 5c597f5dc221f95b43ceac93a569c48025afd434 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Thomas=20M=C3=BCller?= <thomas94@gmx.net>
Date: Wed, 19 Jan 2022 08:10:46 +0100
Subject: [PATCH] Add references to arXiv version

---
 README.md                          | 17 +++++++++++++----
 docs/assets/mueller2022instant.bib |  7 +++++++
 docs/index.html                    | 23 +++++++++++------------
 3 files changed, 31 insertions(+), 16 deletions(-)
 create mode 100644 docs/assets/mueller2022instant.bib

diff --git a/README.md b/README.md
index 724bd15..228cb18 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,9 @@ In each case, we train and render a MLP with multiresolution hash input encoding
 
 > __Instant Neural Graphics Primitives with a Multiresolution Hash Encoding__  
 > [Thomas Müller](https://tom94.net), [Alex Evans](https://research.nvidia.com/person/alex-evans), [Christoph Schied](https://research.nvidia.com/person/christoph-schied), [Alexander Keller](https://research.nvidia.com/person/alex-keller)  
-> _arXiv [cs.GR], Jan 2022_  
-> __[ [Project page](https://nvlabs.github.io/instant-ngp) ] [ [Paper](https://nvlabs.github.io/instant-ngp/assets/mueller2022instant.pdf) ] [ [Video](https://nvlabs.github.io/instant-ngp/assets/mueller2022instant.mp4) ]__
+> _[arXiv:2201.05989 [cs.CV]](https://arxiv.org/abs/2201.05989), Jan 2022_  
+> __[&nbsp;[Project page](https://nvlabs.github.io/instant-ngp)&nbsp;] [&nbsp;[Paper](https://nvlabs.github.io/instant-ngp/assets/mueller2022instant.pdf)&nbsp;] [&nbsp;[Video](https://nvlabs.github.io/instant-ngp/assets/mueller2022instant.mp4)&nbsp;] [&nbsp;[BibTeX](https://nvlabs.github.io/instant-ngp/assets/mueller2022instant.bib)&nbsp;]__
 
- 
 For business inquiries, please visit our website and submit the form: [NVIDIA Research Licensing](https://www.nvidia.com/en-us/research/inquiries/)
 
 
@@ -175,7 +174,17 @@ This project makes use of a number of awesome open source libraries, including:
 
 Many thanks to the authors of these brilliant projects!
 
-## License
+## License and Citation
+
+```bibtex
+@article{mueller2022instant,
+    title = {Instant Neural Graphics Primitives with a Multiresolution Hash Encoding},
+    author = {Thomas M\"uller and Alex Evans and Christoph Schied and Alexander Keller},
+    journal = {arXiv:2201.05989},
+    year = {2022},
+    month = jan
+}
+```
 
 Copyright © 2022, NVIDIA Corporation. All rights reserved.
 
diff --git a/docs/assets/mueller2022instant.bib b/docs/assets/mueller2022instant.bib
new file mode 100644
index 0000000..eb605e1
--- /dev/null
+++ b/docs/assets/mueller2022instant.bib
@@ -0,0 +1,7 @@
+@article{mueller2022instant,
+	title = {Instant Neural Graphics Primitives with a Multiresolution Hash Encoding},
+	author = {Thomas M\"uller and Alex Evans and Christoph Schied and Alexander Keller},
+	journal = {arXiv:2201.05989},
+	year = {2022},
+	month = jan
+}
diff --git a/docs/index.html b/docs/index.html
index 915414e..ddeaf73 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -295,7 +295,6 @@ figure {
 		<figure style="width: 100%; float: left">
 			<p class="caption_justify">
 				We demonstrate near-instant training of neural graphics primitives on a single GPU for multiple tasks. In <b>gigapixel image</b> we represent an image by a neural network. <b>SDF</b> learns a signed distance function in 3D space whose zero level-set represents a 2D surface.
-				<!--<b>Neural radiance caching</b> (NRC) <a href="https://research.nvidia.com/publication/2021-06_Real-time-Neural-Radiance">[Müller et al. 2021]</a> employs a neural network that is trained in real-time to cache costly lighting calculations-->
-				<b>NeRF</b> <a href="https://research.nvidia.com/publication/2021-06_Real-time-Neural-Radiance">[Mildenhall et al. 2020]</a> uses 2D images and their camera poses to reconstruct a volumetric radiance-and-density field that is visualized using ray marching.
+				<b>NeRF</b> <a href="https://www.matthewtancik.com/nerf">[Mildenhall et al. 2020]</a> uses 2D images and their camera poses to reconstruct a volumetric radiance-and-density field that is visualized using ray marching.
 				Lastly, <b>neural volume</b> learns a denoised radiance and density field directly from a volumetric path tracer.
 				In all tasks, our encoding and its efficient implementation provide clear benefits: instant training, high quality, and simplicity. Our encoding is task-agnostic: we use the same implementation and hyperparameters across all tasks and only vary the hash table size which trades off quality and performance.
@@ -308,8 +307,8 @@ figure {
 		<h2>News</h2>
 		<hr>
 		<div class="row">
-			<div><span class="material-icons"> integration_instructions </span> [Jan 2022] Code released on <a href="https://github.com/NVlabs/instant-ngp">GitHub</a>.</div>
-			<!-- <div><span class="material-icons"> description </span> [Jan 2022] Paper released on <a href="https://arxiv.org/abs/XXX">arXiv</a>.</div> -->
+			<div><span class="material-icons"> description </span> [Jan 19th 2022] Paper released on <a href="https://arxiv.org/abs/2201.05989">arXiv</a>.</div>
+			<div><span class="material-icons"> integration_instructions </span> [Jan 14th 2022] Code released on <a href="https://github.com/NVlabs/instant-ngp">GitHub</a>.</div>
 		</div>
 	</section>
 
@@ -468,8 +467,8 @@ figure {
 				<p>Thomas Müller, Alex Evans, Christoph Schied, Alexander Keller</p>
 
 				<div><span class="material-icons"> description </span><a href="assets/mueller2022instant.pdf"> Paper preprint (PDF, 15.3 MB)</a></div>
-				<!-- <div><span class="material-icons"> description </span><a href="https://arxiv.org/abs/xxxx.xxxxx"> arXiv version</a></div> -->
-				<!-- <div><span class="material-icons"> insert_comment </span><a href="assets/mueller2022instant.bib"> BibTeX</a></div> -->
+				<div><span class="material-icons"> description </span><a href="https://arxiv.org/abs/2201.05989"> arXiv version</a></div>
+				<div><span class="material-icons"> insert_comment </span><a href="assets/mueller2022instant.bib"> BibTeX</a></div>
 				<div><span class="material-icons"> integration_instructions </span><a href="https://github.com/NVlabs/instant-ngp"> Code</a></div>
 				<div><span class="material-icons"> videocam </span><a href="assets/mueller2022instant.mp4"> Video</a></div>
 
@@ -478,18 +477,18 @@ figure {
 		</div>
 	</section>
 
-	<!-- <section id="bibtex">
+	<section id="bibtex">
 		<h2>Citation</h2>
 		<hr>
 		<pre><code>@article{mueller2022instant,
-	title = {Instant Neural Graphics Primitives with a Multiresolution Hash Encoding},
-	author = {Thomas M\"uller and Alex Evans and Christoph Schied and Alexander Keller},
-	journal = {arXiv:XXX},
-	year = {2022},
-	month = jan
+    title = {Instant Neural Graphics Primitives with a Multiresolution Hash Encoding},
+    author = {Thomas M\"uller and Alex Evans and Christoph Schied and Alexander Keller},
+    journal = {arXiv:2201.05989},
+    year = {2022},
+    month = jan
 }
 </code></pre>
-	</section> -->
+	</section>
 
 	<section id="acknowledgements">
 		<h2>Acknowledgements</h2>
-- 
GitLab