publications([{ "lang": "en", "publisher": "ACM", "doi": "https://doi.org/10.1145/3399715.3399851", "title": "Target Expansion in Context: the Case of Menu in Handheld Augmented Reality", "url": "https://hal.archives-ouvertes.fr/hal-02960631", "abstract": "Target expansion techniques facilitate pointing by enlarging the effective sizes of targets. As opposed to the numerous studies on target expansion solely focusing on optimizing pointing, we study the compound task of pointing at a Point of Interest (POI) and then interacting with the POI menu in handheld Augmented Reality (AR). A POI menu in AR has a fixed position because it contains relevant information about its location in the real world. We present two techniques that make the cursor jump to the closest opened POI menu after pointing at a POI. Our experimental results show that 1) for selecting a POI the expansion techniques are 31 % faster than the baseline screen-centered crosshair pointing technique, 2) the expansion techniques with/without a jumping cursor to the closest opened POI menu offer similar performances and 3) Touch relative pointing is preferred by participants because it minimizes physical movements.", "authors": { "1": { "first_name": "Patrick", "last_name": "Perea" }, "2": { "first_name": "Denis", "last_name": "Morand" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2020, "uri": "http://iihm.imag.fr/publication/PMN20e/", "pages": "1-9", "bibtype": "inproceedings", "id": 884, "abbr": "PMN20e", "address": "Ischia Island, Italy", "date": "2020-09-28", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "AVI '20: International Conference on Advanced Visual Interfaces", "type_publi": "icolcomlec" }, { "lang": "fr", "type_publi": "these", "title": "Mobile augmented reality interaction : digital objects exploration and pointing", "url": "https://tel.archives-ouvertes.fr/tel-03012809", "abstract": "This thesis contributes to the research field of Human-Computer Interaction (HCI). The focus of the research is on user interaction with handheld Augmented Reality (AR) systems.AR allows the addition of digital content, primarily graphics, to the user's physical environment. The resulting mixed environment includes digital objects registered in the physical world. This mixed environment, partially visible on the mobile device’s screen, defines several constraints for interaction. Our work is dedicated to the selection of a digital target in this mixed environment and we address two questions: (1) how to improve the perception of the mixed environment beyond what is perceived by the camera’s field of view of the mobile device, for finding off-screen digital targets (2) how to enhance pointing at a digital target registered in the physical environment. Our contributions answer these two questions.We first propose three interactive off-screen guidance techniques, which we evaluate by conducting two laboratory experiments. These techniques are based on Halo, a visualization technique of off-screen objects based on circles, and differ in the way the aggregation of the off-screen objects is displayed on screen. The results of the two experiments show that our three techniques effectively extend users’ knowledge of the mixed environment, and limit the visual intrusion on the mobile device’s screen in comparison with the traditional arrow-based visualization technique.We then define two interaction techniques enhancing the selection of digital targets. 
These techniques are based on (1) target expansion techniques, which facilitate target selection by allocating a larger active area to each target, and (2) a cursor jump, which shortens the distance between the cursor and the content of the digital target. The two techniques differ in the way users manipulate the cursor once it has jumped to the digital target. We propose two possibilities: physical pointing by moving the mobile device to the desired content, or relative pointing using thumb strokes on the screen. The result of a laboratory experiment confirms that target expansion techniques improve pointing performance, while relative pointing is preferred by participants. Our contributions are applied to the field of industrial maintenance, which is in charge of repairing or preventing failures on production machines. As part of a Schneider Electric-CIFRE thesis, Schneider Electric’s Augmented Operator Advisor product (an AR maintenance assistance application) includes one of the proposed targeting assistance techniques.", "year": 2020, "uri": "http://iihm.imag.fr/publication/P20a/", "id": 896, "bibtype": "phdthesis", "abbr": "P20a", "authors": { "1": { "first_name": "Patrick", "last_name": "Perea" } }, "date": "2020-07-06", "type": "Thèses et habilitations", "pages": "174" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3343055.3359719", "title": "Spotlight on Off-Screen Points of Interest in Handheld Augmented Reality: Halo-based techniques", "abstract": "Navigating Augmented Reality (AR) environments with a handheld device often requires users to access digital contents (i.e. Points of Interest - POIs) associated with physical objects outside the field of view of the device's camera. Halo3D is a technique that displays the location of off-screen POIs as halos (arcs) along the edges of the screen. Halo3D reduces clutter by aggregating POIs but has not been evaluated. The results of a first experiment show that an enhanced version of Halo3D was 18% faster than the focus+context technique AroundPlot* for pointing at a POI, and perceived as 34% less intrusive than the arrow-based technique Arrow2D.
The results of a second experiment in more realistic settings reveal that two variants of Halo3D that show the spatial distribution of POIs in clusters (1) enable an effective understanding of the off-screen environment and (2) require less effort than AroundPlot* to find POIs in the environment.", "year": 2019, "uri": "http://iihm.imag.fr/publication/PMN19a/", "pages": "43-54", "bibtype": "inproceedings", "id": 868, "abbr": "PMN19a", "authors": { "1": { "first_name": "Patrick", "last_name": "Perea" }, "2": { "first_name": "Denis", "last_name": "Morand" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "date": "2019-11-10", "document": "http://iihm.imag.fr/publs/2019/PereaMorandNigay19.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of the 2019 ACM International Conference on Interactive Surfaces and Spaces (ISS '19)" }, { "lang": "fr", "type_publi": "colcomlec", "doi": "https://doi.org/10.1145/3132129.3132144", "title": "Halo3D : une Technique pour Visualiser les Points d’Intérêt Hors-Champ en Réalité Augmentée Mobile / Halo3D: a technique for visualizing off-screen points of interest in mobile augmented reality", "url": "https://hal.archives-ouvertes.fr/hal-01578645", "abstract": "When working with mobile Augmented Reality (AR), users often need to visualize off-screen points of interest (POIs). These POIs belong to the context since they are not directly observable in the 3D first-person view on screen. The aim is to present the 3D direction and distance of each POI in a 3D first-person view. The context in mobile AR can include a large number of POIs, including locally dense clusters, as in mobile AR systems for production plant machine maintenance. Existing solutions display 3D arrows or an area on the edges of the screen to represent the POIs of the context. These techniques display the direction but not the distance of each POI. We present Halo3D, a mobile AR adaptation of a 2D visualization technique. Halo3D displays the 3D direction and distance of off-screen POIs in a high POI-density environment. The paper describes the design elements of Halo3D and outlines the experimental study to be conducted. The first experimental results indicate that users prefer visual attributes that minimize the visual intrusion on screen.", "authors": { "1": { "first_name": "Patrick", "last_name": "Perea" }, "2": { "first_name": "Denis", "last_name": "Morand" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2017, "uri": "http://iihm.imag.fr/publication/PMN17b/", "pages": "43-51", "bibtype": "inproceedings", "id": 808, "abbr": "PMN17b", "address": "Poitiers, France", "date": "2017-08-29", "document": "http://iihm.imag.fr/publs/2017/Halo3D.pdf", "type": "Conférences nationales avec comité de lecture sur texte complet", "booktitle": "29ème conférence francophone sur l'Interaction Homme-Machine, IHM 2017, Poitiers, France, ACM" }, { "lang": "en", "type_publi": "colloque", "doi": "https://doi.org/10.1109/ISMAR-Adjunct.2017.58", "title": "Halo3D: A Technique for Visualizing Off-Screen Points of Interest in Mobile Augmented Reality", "abstract": "When working with mobile Augmented Reality (AR) applications, users need to be aware of relevant points of interest (POIs) that are located off-screen. These POIs belong to the context since they are not observable in the 3D first-person AR view on screen.
The context in mobile AR can include a large number of POIs, including locally dense clusters, as in mobile AR applications for production plant machine maintenance. Existing solutions display 3D arrows or an area on the edges of the screen to represent the POIs of the context. These techniques display the direction but not the distance of each POI. We present Halo3D, a visualization technique that conveys the 3D direction and distance of off-screen POIs while avoiding overlap and clutter in a high-POI-density AR environment.", "year": 2017, "uri": "http://iihm.imag.fr/publication/PMN17a/", "pages": "170-175", "bibtype": "inproceedings", "id": 810, "editor": "IEEE", "authors": { "1": { "first_name": "Patrick", "last_name": "Perea" }, "2": { "first_name": "Denis", "last_name": "Morand" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "date": "2017-10-09", "document": "http://iihm.imag.fr/publs/2017/ISMAR_Halo3D_Augmented_Reality.pdf", "type": "Autres conférences et colloques avec actes", "booktitle": "ISMAR 2017 IEEE International Symposium on Mixed and Augmented Reality (ISMAR-Adjunct)", "abbr": "PMN17a" }]);