% NOTE(review): retyped from @misc to @inproceedings — the entry has editors,
% volume, pages, and a publisher, i.e. a paper in edited proceedings. The
% booktitle (EpiRob 2003) was reconstructed from the editor list and LUCS
% volume 101; confirm against the CogPrints record before release.
@inproceedings{cogprints3329,
  author    = {Arsenio, Artur and Fitzpatrick, Paul and Kemp, Charles C. and Metta, Giorgio},
  title     = {The Whole World in Your Hand: Active and Interactive Segmentation},
  editor    = {Prince, Christopher G. and Berthouze, Luc and Kozima, Hideki and Bullock, Daniel and Stojanov, Georgi and Balkenius, Christian},
  booktitle = {Proceedings of the Third International Workshop on Epigenetic Robotics: Modeling Cognitive Development in Robotic Systems},
  series    = {Lund University Cognitive Studies},
  volume    = {101},
  publisher = {Lund University Cognitive Studies},
  year      = {2003},
  pages     = {49--56},
  keywords  = {object segmentation, computer vision, robotic system, wearable system},
  url       = {http://cogprints.org/3329/},
  abstract  = {Object segmentation is a fundamental problem in computer vision and a powerful resource for development. This paper presents three embodied approaches to the visual segmentation of objects. Each approach to segmentation is aided by the presence of a hand or arm in the proximity of the object to be segmented. The first approach is suitable for a robotic system, where the robot can use its arm to evoke object motion. The second method operates on a wearable system, viewing the world from a human's perspective, with instrumentation to help detect and segment objects that are held in the wearer's hand. The third method operates when observing a human teacher, locating periodic motion (finger/arm/object waving or tapping) and using it as a seed for segmentation. We show that object segmentation can serve as a key resource for development by demonstrating methods that exploit high-quality object segmentations to develop both low-level vision capabilities (specialized feature detectors) and high-level vision capabilities (object recognition and localization).},
}