@inproceedings{breitbarth2013lighting,
  author    = {Breitbarth, Andreas and Kühmstedt, Peter and Notni, Gunther and Denzler, Joachim},
  title     = {Lighting Estimation in Fringe Images During Motion Compensation for {3D} Measurements},
  booktitle = {Videometrics, Range Imaging, and Applications XII; and Automated Visual Inspection},
  editor    = {Remondino, Fabio and Shortis, Mark R. and Beyerer, Jürgen and Puente Leon, Fernando},
  publisher = {SPIE},
  volume    = {8791},
  pages     = {87910P:1--9},
  year      = {2013},
  month     = may,
  doi       = {10.1117/12.2020500},
  keywords  = {3D measurements, fringe projection, moving objects, motion compensation, lighting changes},
  abstract  = {Fringe projection is an established method to measure the 3D structure of macroscopic objects. To receive both a high accuracy and robustness a certain number of images with pairwise different projection pattern is necessary. Over this sequence it is necessary that each 3D object point corresponds to the same image point at every time. This situation is no longer given for moving measurement objects. One possibility to solve this problem is to restore the static situation. Therefore, the acquired camera images have to be realigned and secondly the degree of fringe shift has to be estimated. Furthermore, there exists another variable: change in lighting. These variances cannot be compensated, but it has to be approximately determined and integrated into the calculation process of 3D data. The possibility to obtain an accurate measurement is not being given because due to the condition that each arbitrary interferogram comprised three unknowns: additive and multiplicative intensity distortion and the phase distribution. The changes in lighting are described by the first two parameters and have to be determined for each camera pixel and each image. So, there is a trade-off problem: Two variables, but only one equation. We propose a method to estimate these lighting changes for each camera pixel with respect to their neighbors at each point in time. The size of local neighborhoods in the presented algorithms is chosen adaptively in respect to the gradients of object structure because for accurate 3D measurements you need both: sharp edges and on the other side smoothness in regions with very low contrast in intensities or wide fringe periods. To speed up the estimation of lighting values, not all pixel of the neighborhood were taken into account. Depending on the direction of projected fringes, either axially parallel or diagonal adjacent pixels are used. Taken together, our method results in a motion compensated dense 3D point cloud without any artifacts.},
}