@inproceedings{14798,
  abstract     = {A faithful reproduction of gloss is inherently difficult because of the limited dynamic range, peak luminance, and 3D capabilities of display devices. This work investigates how the display capabilities affect gloss appearance with respect to a real-world reference object. To this end, we employ an accurate imaging pipeline to achieve a perceptual gloss match between a virtual and real object presented side-by-side on an augmented-reality high-dynamic-range (HDR) stereoscopic display, which has not been previously attained to this extent. Based on this precise gloss reproduction, we conduct a series of gloss matching experiments to study how gloss perception degrades based on individual factors: object albedo, display luminance, dynamic range, stereopsis, and tone mapping. We support the study with a detailed analysis of individual factors, followed by an in-depth discussion on the observed perceptual effects. Our experiments demonstrate that stereoscopic presentation has a limited effect on the gloss matching task on our HDR display. However, both reduced luminance and dynamic range of the display reduce the perceived gloss. This means that the visual system cannot compensate for the changes in gloss appearance across luminance (lack of gloss constancy), and the tone mapping operator should be carefully selected when reproducing gloss on a low dynamic range (LDR) display.},
  author       = {Chen, Bin and Jindal, Akshay and Piovarči, Michal and Wang, Chao and Seidel, Hans-Peter and Didyk, Piotr and Myszkowski, Karol and Serrano, Ana and Mantiuk, Rafał K.},
  booktitle    = {SIGGRAPH Asia 2023 Conference Papers},
  isbn         = {9798400703157},
  location     = {Sydney, Australia},
  publisher    = {Association for Computing Machinery},
  title        = {The Effect of Display Capabilities on the Gloss Consistency between Real and Virtual Objects},
  doi          = {10.1145/3610548.3618226},
  year         = {2023},
}

@article{12972,
  abstract     = {Embroidery is a long-standing and high-quality approach to making logos and images on textiles. Nowadays, it can also be performed via automated machines that weave threads with high spatial accuracy. A characteristic feature of the appearance of the threads is a high degree of anisotropy. The anisotropic behavior is caused by depositing thin but long strings of thread. As a result, the stitched patterns convey both color and direction. Artists leverage this anisotropic behavior to enhance pure color images with textures, illusions of motion, or depth cues. However, designing colorful embroidery patterns with prescribed directionality is a challenging task, one usually requiring an expert designer. In this work, we propose an interactive algorithm that generates machine-fabricable embroidery patterns from multi-chromatic images equipped with user-specified directionality fields. We cast the problem of finding a stitching pattern into vector theory. To find a suitable stitching pattern, we extract sources and sinks from the divergence field of the vector field extracted from the input and use them to trace streamlines. We further optimize the streamlines to guarantee a smooth and connected stitching pattern. The generated patterns approximate the color distribution constrained by the directionality field. To allow for further artistic control, the trade-off between color match and directionality match can be interactively explored via an intuitive slider. We showcase our approach by fabricating several embroidery paths.},
  author       = {Liu, Zhenyuan and Piovarči, Michal and Hafner, Christian and Charrondière, Raphaël and Bickel, Bernd},
  issn         = {1467-8659},
  journal      = {Computer Graphics Forum},
  keywords     = {embroidery, design, directionality, density, image},
  location     = {Saarbrücken, Germany},
  number       = {2},
  pages        = {397--409},
  publisher    = {Wiley},
  title        = {Directionality-Aware Design of Embroidery Patterns},
  doi          = {10.1111/cgf.14770},
  volume       = {42},
  year         = {2023},
}

@inproceedings{12976,
  abstract     = {3D printing based on continuous deposition of materials, such as filament-based 3D printing, has seen widespread adoption thanks to its versatility in working with a wide range of materials. An important shortcoming of this type of technology is its limited multi-material capabilities. While there are simple hardware designs that enable multi-material printing in principle, the required software is heavily underdeveloped. A typical hardware design fuses together individual materials fed into a single chamber from multiple inlets before they are deposited. This design, however, introduces a time delay between the intended material mixture and its actual deposition. In this work, inspired by diverse path planning research in robotics, we show that this mechanical challenge can be addressed via improved printer control. We propose to formulate the search for optimal multi-material printing policies in a reinforcement learning setup. We put forward a simple numerical deposition model that takes into account the non-linear material mixing and delayed material deposition. To validate our system we focus on color fabrication, a problem known for its strict requirements for varying material mixtures at a high spatial frequency. We demonstrate that our learned control policy outperforms state-of-the-art hand-crafted algorithms.},
  author       = {Liao, Kang and Tricard, Thibault and Piovarči, Michal and Seidel, Hans-Peter and Babaei, Vahid},
  booktitle    = {2023 IEEE International Conference on Robotics and Automation (ICRA)},
  issn         = {1050-4729},
  keywords     = {reinforcement learning, deposition, control, color, multi-filament},
  location     = {London, United Kingdom},
  pages        = {12345--12352},
  publisher    = {IEEE},
  title        = {Learning Deposition Policies for Fused Multi-Material {3D} Printing},
  doi          = {10.1109/ICRA48891.2023.10160465},
  volume       = {2023},
  year         = {2023},
}

@inproceedings{12979,
  abstract     = {Color and gloss are fundamental aspects of surface appearance. State-of-the-art fabrication techniques can manipulate both properties of the printed 3D objects. However, in the context of appearance reproduction, perceptual aspects of color and gloss are usually handled separately, even though previous perceptual studies suggest their interaction. Our work is motivated by previous studies demonstrating a perceived color shift due to a change in the object's gloss, i.e., two samples with the same color but different surface gloss appear as they have different colors. In this paper, we conduct new experiments which support this observation and provide insights into the magnitude and direction of the perceived color change. We use the observations as guidance to design a new method that estimates and corrects the color shift enabling the fabrication of objects with the same perceived color but different surface gloss. We formulate the problem as an optimization procedure solved using differentiable rendering. We evaluate the effectiveness of our method in perceptual experiments with 3D objects fabricated using a multi-material 3D printer and demonstrate potential applications.},
  author       = {Condor, Jorge and Piovarči, Michal and Bickel, Bernd and Didyk, Piotr},
  booktitle    = {ACM SIGGRAPH 2023 Conference Proceedings},
  isbn         = {9798400701597},
  keywords     = {color, gloss, perception, color compensation, color management},
  location     = {Los Angeles, CA, United States},
  publisher    = {Association for Computing Machinery},
  title        = {Gloss-Aware Color Correction for {3D} Printing},
  doi          = {10.1145/3588432.3591546},
  year         = {2023},
}

@article{12984,
  abstract     = {Tattoos are a highly popular medium, with both artistic and medical applications. Although the mechanical process of tattoo application has evolved historically, the results are reliant on the artisanal skill of the artist. This can be especially challenging for some skin tones, or in cases where artists lack experience. We provide the first systematic overview of tattooing as a computational fabrication technique. We built an automated tattooing rig and a recipe for the creation of silicone sheets mimicking realistic skin tones, which allowed us to create an accurate model predicting tattoo appearance. This enables several exciting applications including tattoo previewing, color retargeting, novel ink spectra optimization, color-accurate prosthetics, and more.},
  author       = {Piovarči, Michal and Chapiro, Alexandre and Bickel, Bernd},
  issn         = {1557-7368},
  journal      = {ACM Transactions on Graphics},
  keywords     = {appearance, modeling, reproduction, tattoo, skin color, gamut mapping, ink-optimization, prosthetic},
  location     = {Los Angeles, CA, United States},
  number       = {4},
  publisher    = {Association for Computing Machinery},
  title        = {{Skin-Screen}: A Computational Fabrication Framework for Color Tattoos},
  doi          = {10.1145/3592432},
  volume       = {42},
  year         = {2023},
}

@article{11442,
  abstract     = {Enabling additive manufacturing to employ a wide range of novel, functional materials can be a major boost to this technology. However, making such materials printable requires painstaking trial-and-error by an expert operator, as they typically tend to exhibit peculiar rheological or hysteresis properties. Even in the case of successfully finding the process parameters, there is no guarantee of print-to-print consistency due to material differences between batches. These challenges make closed-loop feedback an attractive option where the process parameters are adjusted on-the-fly. There are several challenges for designing an efficient controller: the deposition parameters are complex and highly coupled, artifacts occur after long time horizons, simulating the deposition is computationally costly, and learning on hardware is intractable. In this work, we demonstrate the feasibility of learning a closed-loop control policy for additive manufacturing using reinforcement learning. We show that approximate, but efficient, numerical simulation is sufficient as long as it allows learning the behavioral patterns of deposition that translate to real-world experiences. In combination with reinforcement learning, our model can be used to discover control policies that outperform baseline controllers. Furthermore, the recovered policies have a minimal sim-to-real gap. We showcase this by applying our control policy in-vivo on a single-layer, direct ink writing printer.},
  author       = {Piovarči, Michal and Foshey, Michael and Xu, Jie and Erps, Timothy and Babaei, Vahid and Didyk, Piotr and Rusinkiewicz, Szymon and Matusik, Wojciech and Bickel, Bernd},
  issn         = {1557-7368},
  journal      = {ACM Transactions on Graphics},
  number       = {4},
  publisher    = {Association for Computing Machinery},
  title        = {Closed-Loop Control of Direct Ink Writing via Reinforcement Learning},
  doi          = {10.1145/3528223.3530144},
  volume       = {41},
  year         = {2022},
}

@inproceedings{12135,
  abstract     = {A good match of material appearance between real-world objects and their digital on-screen representations is critical for many applications such as fabrication, design, and e-commerce. However, faithful appearance reproduction is challenging, especially for complex phenomena, such as gloss. In most cases, the view-dependent nature of gloss and the range of luminance values required for reproducing glossy materials exceeds the current capabilities of display devices. As a result, appearance reproduction poses significant problems even with accurately rendered images. This paper studies the gap between the gloss perceived from real-world objects and their digital counterparts. Based on our psychophysical experiments on a wide range of 3D printed samples and their corresponding photographs, we derive insights on the influence of geometry, illumination, and the display’s brightness and measure the change in gloss appearance due to the display limitations. Our evaluation experiments demonstrate that using the prediction to correct material parameters in a rendering system improves the match of gloss appearance between real objects and their visualization on a display device.},
  author       = {Chen, Bin and Piovarči, Michal and Wang, Chao and Seidel, Hans-Peter and Didyk, Piotr and Myszkowski, Karol and Serrano, Ana},
  booktitle    = {SIGGRAPH Asia 2022 Conference Papers},
  isbn         = {9781450394703},
  location     = {Daegu, South Korea},
  publisher    = {Association for Computing Machinery},
  title        = {Gloss Management for Consistent Reproduction of Real and Virtual Objects},
  doi          = {10.1145/3550469.3555406},
  volume       = {2022},
  year         = {2022},
}

@inproceedings{10148,
  abstract     = {Tactile feedback of an object’s surface enables us to discern its material properties and affordances. This understanding is used in digital fabrication processes by creating objects with high-resolution surface variations to influence a user’s tactile perception. As the design of such surface haptics commonly relies on knowledge from real-life experiences, it is unclear how to adapt this information for digital design methods. In this work, we investigate replicating the haptics of real materials. Using an existing process for capturing an object’s microgeometry, we digitize and reproduce the stable surface information of a set of 15 fabric samples. In a psychophysical experiment, we evaluate the tactile qualities of our set of original samples and their replicas. From our results, we see that direct reproduction of surface variations is able to influence different psychophysical dimensions of the tactile perception of surface textures. While the fabrication process did not preserve all properties, our approach underlines that replication of surface microgeometries benefits fabrication methods in terms of haptic perception by covering a large range of tactile variations. Moreover, by changing the surface structure of a single fabricated material, its material perception can be influenced. We conclude by proposing strategies for capturing and reproducing digitized textures to better resemble the perceived haptics of the originals.},
  author       = {Degraen, Donald and Piovarči, Michal and Bickel, Bernd and Krüger, Antonio},
  booktitle    = {Proceedings of the 34th Annual ACM Symposium on User Interface Software and Technology},
  isbn         = {978-1-4503-8635-7},
  location     = {Virtual},
  pages        = {954--971},
  publisher    = {Association for Computing Machinery},
  title        = {Capturing Tactile Properties of Real Surfaces for Haptic Reproduction},
  doi          = {10.1145/3472749.3474798},
  year         = {2021},
}

@article{10574,
  abstract     = {The understanding of material appearance perception is a complex problem due to interactions between material reflectance, surface geometry, and illumination. Recently, Serrano et al. collected the largest dataset to date with subjective ratings of material appearance attributes, including glossiness, metallicness, sharpness and contrast of reflections. In this work, we make use of their dataset to investigate for the first time the impact of the interactions between illumination, geometry, and eight different material categories in perceived appearance attributes. After an initial analysis, we select for further analysis the four material categories that cover the largest range for all perceptual attributes: fabric, plastic, ceramic, and metal. Using a cumulative link mixed model (CLMM) for robust regression, we discover interactions between these material categories and four representative illuminations and object geometries. We believe that our findings contribute to expanding the knowledge on material appearance perception and can be useful for many applications, such as scene design, where any particular material in a given shape can be aligned with dominant classes of illumination, so that a desired strength of appearance attributes can be achieved.},
  author       = {Chen, Bin and Wang, Chao and Piovarči, Michal and Seidel, Hans-Peter and Didyk, Piotr and Myszkowski, Karol and Serrano, Ana},
  issn         = {1432-2315},
  journal      = {The Visual Computer},
  number       = {12},
  pages        = {2975--2987},
  publisher    = {Springer Nature},
  title        = {The Effect of Geometry and Illumination on Appearance Perception of Different Material Categories},
  doi          = {10.1007/s00371-021-02227-x},
  volume       = {37},
  year         = {2021},
}

@article{9820,
  abstract     = {Material appearance hinges on material reflectance properties but also surface geometry and illumination. The unlimited number of potential combinations between these factors makes understanding and predicting material appearance a very challenging task. In this work, we collect a large-scale dataset of perceptual ratings of appearance attributes with more than 215,680 responses for 42,120 distinct combinations of material, shape, and illumination. The goal of this dataset is twofold. First, we analyze for the first time the effects of illumination and geometry in material perception across such a large collection of varied appearances. We connect our findings to those of the literature, discussing how previous knowledge generalizes across very diverse materials, shapes, and illuminations. Second, we use the collected dataset to train a deep learning architecture for predicting perceptual attributes that correlate with human judgments. We demonstrate the consistent and robust behavior of our predictor in various challenging scenarios, which, for the first time, enables estimating perceived material attributes from general 2D images. Since our predictor relies on the final appearance in an image, it can compare appearance properties across different geometries and illumination conditions. Finally, we demonstrate several applications that use our predictor, including appearance reproduction using 3D printing, BRDF editing by integrating our predictor in a differentiable renderer, illumination design, or material recommendations for scene design.},
  author       = {Serrano, Ana and Chen, Bin and Wang, Chao and Piovarči, Michal and Seidel, Hans-Peter and Didyk, Piotr and Myszkowski, Karol},
  issn         = {1557-7368},
  journal      = {ACM Transactions on Graphics},
  number       = {4},
  publisher    = {Association for Computing Machinery},
  title        = {The Effect of Shape and Illumination on Material Perception: Model and Applications},
  doi          = {10.1145/3450626.3459813},
  volume       = {40},
  year         = {2021},
}

