BibTeX
@inproceedings{Yengui:2013,
author = {Yengui, Mohamed and Poulin, Pierre},
title = {Simulated bidirectional texture functions with silhouette details},
booktitle = {Proceedings of Graphics Interface 2013},
series = {GI 2013},
year = {2013},
issn = {0713-5424},
isbn = {978-1-4822-1680-6},
location = {Regina, Saskatchewan, Canada},
pages = {45--54},
numpages = {10},
publisher = {Canadian Human-Computer Communications Society},
address = {Toronto, Ontario, Canada},
}
Abstract
Representing material appearance requires an understanding of the underlying structures of real surfaces, of light-material interaction, and of the human visual system. The Bidirectional Texture Function (BTF) describes a real-world material as a spatial variation of reflectance that depends on the view and light directions. Real BTFs integrate all optical phenomena occurring in a complex material, such as self-occlusions, interreflections, subsurface scattering, etc., independently of the mesoscopic surface geometry. In this paper, we revisit BTF simulation to improve the modeling of surface appearance. In recent years, computer graphics has achieved high levels of image realism in the geometric appearance of 3D scenes. It is therefore natural to expect that using this technology to simulate visual effects at the level of mesoscopic geometry should yield even more realistic simulated BTFs. Our ultimate goal is thus to produce material appearance as rich and as faithful as that of real materials, while relying more on the intuition and skills of artists, and on the rendering capabilities of today's computer graphics. We have designed a virtual parallel-projection/directional incident illumination framework that exploits rendering coherence to produce, in reasonable rendering times and with good compression ratios, BTFs of complex mesoscopic geometry, even at grazing angles. Our current framework efficiently simulates local interreflection effects within mesoscopic structures, as well as effects due to transparency, silhouettes, and surface curvature. Our general simulation framework should also prove extensible to several other visual phenomena.
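To make the BTF definition above concrete, the sketch below shows how a tabulated BTF can be queried as a function of surface position and view/light directions. It assumes a layout commonly used for measured BTF data (one RGB texture per sampled view/light direction pair) with hypothetical array and function names; it is not the data representation used in the paper.

import numpy as np

# Hypothetical tabulated BTF: a 5-D array of shape
# (n_view, n_light, height, width, 3), where each (view, light) pair
# stores one RGB texture. 'view_dirs' and 'light_dirs' are
# (n_view, 3) and (n_light, 3) arrays of unit direction vectors
# sampled over the upper hemisphere.

def lookup_btf(btf, view_dirs, light_dirs, u, v, omega_v, omega_l):
    """Nearest-neighbour BTF query at texture coordinate (u, v) in [0, 1)^2
    for unit view direction omega_v and unit light direction omega_l."""
    n_view, n_light, height, width, _ = btf.shape
    # Select the closest sampled view and light directions (largest dot product).
    iv = int(np.argmax(view_dirs @ omega_v))
    il = int(np.argmax(light_dirs @ omega_l))
    # Map the spatial coordinate to a texel of the selected texture.
    x = min(int(u * width), width - 1)
    y = min(int(v * height), height - 1)
    return btf[iv, il, y, x]

In practice, renderers interpolate between neighbouring direction samples and texels rather than taking nearest neighbours, but the six-dimensional indexing, spatial position plus view and light directions, is the same.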