publications([{ "lang": "en", "publisher": "Springer", "doi": "http://dx.doi.org/10.1007/978-4-431-55861-3_8", "title": "Mixed Reality Navigation on a Tablet Computer for Supporting Machine Maintenance in Wide-area Indoor Environment ", "abstract": "This paper describes a maintenance service support system for wide-area indoor environment, such as a factory and a hospital. In maintenance services, operators often have to check a map to find out a way to a target machine, and also have to refer documents to get information about check-up and repair of the machine. In order to reduce working load of operators, information technology can help operators carry out additional but important operations during maintenance, such as referring documents and maps, recording maintenance logs and so on. In this paper, we propose mixed reality navigation on a tablet computer composed of augmented virtuality mode and augmented reality mode. Augmented virtuality mode performs map-based navigation shows positions of the user and the target machine. Augmented reality mode performs intuitive visualization of information about the machine by overlaying annotations on camera images. The proposed system is based on a hybrid localization technique realized with pedestrian dead reckoning (PDR) and 3D model-based image processing for the purpose of covering wide-area indoor environment. Experimental results using our prototype with a mock-up model of a machine are also described for showing feasibility of our concept in the paper. ", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Thomas", "last_name": "Vincent" }, "3": { "first_name": "Soichi", "last_name": "Ebisuno" }, "4": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "5": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "6": { "first_name": "Takashi", "last_name": "Okuma" }, "7": { "first_name": "Minoru", "last_name": "Yoshida" }, "8": { "first_name": "Laurence", "last_name": "Nigay" }, "9": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2014, "uri": "http://iihm.imag.fr/publication/MVE+14a/", "pages": "41-47", "bibtype": "inproceedings", "id": 705, "abbr": "MVE+14a", "address": "Yokohama, Japan", "date": "2014-09-14", "document": "http://iihm.imag.fr/publs/2014/ICServ2014.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Conference Proceedings of ICServ2014, the 2nd International Conference on Serviceology", "type_publi": "icolcomlec" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "http://dx.doi.org/10.1145/2556288.2557242", "title": "Direct Drawing on 3D Shapes with Automated Camera Control", "abstract": "We present ACCD, an interaction technique that allows di­rect drawing of long curves on 3D shapes with a tablet display over both multiple depth layers and multiple viewpoints. ACCD reduces the number of explicit viewpoint manipu­lations by combining self-occlusion management and auto­mated camera control. As such it enables drawing on oc­cluded faces but also around a 3D shape while keeping a con­stant drawing precision. 
Our experimental results indicate the efficacy of ACCD over conventional techniques.", "authors": { "1": { "first_name": "Michael", "last_name": "Ortega" }, "2": { "first_name": "Thomas", "last_name": "Vincent" } }, "year": 2014, "uri": "http://iihm.imag.fr/publication/OV14a/", "pages": "2047-2050", "bibtype": "inproceedings", "id": 671, "abbr": "OV14a", "address": "Toronto, Canada", "date": "2014-02-19", "document": "http://iihm.imag.fr/publs/2014/CHI2014_Direct_Drawing_on_3D_Shapes_with_Automated_Camera_Control.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of the international conference on Human factors in computing systems (CHI 2014)" }, { "lang": "en", "type_publi": "these", "doi": "https://tel.archives-ouvertes.fr/tel-01551808", "title": "Handheld Augmented Reality Interaction: Spatial Relations", "abstract": "We explored interaction within the context of handheld Augmented Reality (AR), where a handheld device is used as a physical magic lens to ’augment’ the physical surrounding. We focused, in particular, on the role of spatial relations between the on-screen content and the physical surrounding. On the one hand, spatial relations define opportunities for mixing environments, such as the adaptation of the digital augmentation to the user’s location. On the other hand, spatial relations involve specific constraints for interaction, such as the impact of hand tremor on on-screen camera image stability. The question is then: how can we relax spatial constraints while maintaining the feeling of digital-physical collocation? Our contribution is three-fold.\r\n• First, we propose a design space for handheld AR on-screen content with a particular focus on the spatial relations between the different identified frames of reference. This design space defines a framework for systematically studying interaction with handheld AR applications.\r\n• Second, we propose and evaluate different handheld AR pointing techniques to improve pointing precision. Indeed, with a handheld AR set-up, both touch-screen input and the spatial relations between the on-screen content and the physical surrounding impair the precision of pointing.\r\n• Third, as part of a collaborative research project involving AIST-Tsukuba and Schneider-France and Japan, we developed a toolkit supporting the development of handheld AR applications. The toolkit has been used to develop several demonstrators.", "year": 2014, "uri": "http://iihm.imag.fr/publication/V14a/", "id": 709, "bibtype": "phdthesis", "abbr": "V14a", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" } }, "date": "2014-10-02", "document": "http://iihm.imag.fr/publs/2014/PhD-ThomasVincent.pdf", "type": "Thèses et habilitations", "pages": "197" }, { "lang": "fr", "type_publi": "colloque", "title": "Techniques de Pointage à Distance : Cibles Numériques et Cibles Physiques", "url": "https://ubimob2014.sciencesconf.org/42778", "abstract": "Au sein d’un environnement ubiquitaire, l’ordinateur devient évanescent : nos objets quotidiens sont augmentés d’électronique, les environnements deviennent perceptifs, déconfinant l’interaction homme-machine de l’ancien ordinateur «boîte grise» à des espaces pervasifs. Désormais, l’utilisateur évolue dans un monde physico-numérique ou espace interactif mixte. 
Au sein de cet espace interactif, un besoin est alors d’interagir à distance, que ce soit pour manipuler des objets numériques sur un écran distant ou des objets physiques. Cet article est dédié aux techniques de pointage à distance pour désigner un objet numérique ou physique. Nous décrivons six techniques de pointage pour interagir dans un environnement ubiquitaire : la première pour pointer à distance sur des cibles numériques, les cinq autres pour pointer sur des objets physiques avec et sans dispositif mobile.", "authors": { "1": { "first_name": "Céline", "last_name": "Coutrix" }, "2": { "first_name": "William", "last_name": "Delamare" }, "3": { "first_name": "Maxime", "last_name": "Guillon" }, "4": { "first_name": "Takeshi", "last_name": "Kurata" }, "5": { "first_name": "François", "last_name": "Leitner" }, "6": { "first_name": "Laurence", "last_name": "Nigay" }, "7": { "first_name": "Thomas", "last_name": "Vincent" } }, "year": 2014, "uri": "http://iihm.imag.fr/publication/CDG+14a/", "pages": "5", "bibtype": "inproceedings", "id": 691, "abbr": "CDG+14a", "address": "Nice, France", "date": "2014-06-05", "document": "http://iihm.imag.fr/publs/2014/UBIMOB2014-DistantPointing.pdf", "type": "Autres conférences et colloques avec actes", "booktitle": "UbiMob2014 : 10èmes journées francophones Mobilité et Ubiquité" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "http://dx.doi.org/10.1007/978-3-642-40483-2_9", "title": "Precise pointing techniques for handheld Augmented Reality", "abstract": "We propose two techniques that improve the accuracy of pointing at physical objects for handheld Augmented Reality (AR). In handheld AR, pointing accuracy is limited by both touch input and camera viewpoint instability due to hand jitter. The design of our techniques is based on the relationship between the touch input space and two visual reference frames for on-screen content, namely the screen and the physical object that one is pointing at. The first technique is based on Shift, a touch-based pointing technique, and video freeze, in order to combine the two reference frames for precise pointing. In contrast, without freezing the video, the second technique offers a precise mode with a cursor that is stabilized on the physical object and controlled with relative touch inputs on the screen. Our experimental results show that our techniques are more accurate than the baseline techniques, namely direct touch on the video and screen-centered crosshair pointing.", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" }, "2": { "first_name": "Laurence", "last_name": "Nigay" }, "3": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/VNK13a/", "pages": "122-139", "bibtype": "inproceedings", "id": 624, "abbr": "VNK13a", "address": "Cape Town, South Africa", "date": "2013-09-04", "document": "http://iihm.imag.fr/publs/2013/Precise_Pointing_Techniques_For_Handheld_AR.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of the 14th IFIP TC13 Conference on Human-Computer Interaction (Interact 2013)" }, { "lang": "en", "type_publi": "colcomlec", "doi": "http://doi.acm.org/10.1145/2534903.2534905", "title": "Handheld Augmented Reality: Effect of registration jitter on cursor-based pointing techniques", "url": "http://hal.inria.fr/hal-00875440", "abstract": "Handheld Augmented Reality relies on the registration of digital content on physical objects. 
Yet, the accuracy of this registration depends on environmental conditions. It is therefore important to study the impact of registration jitter on interaction and in particular on pointing at augmented objects where precision may be required. We present an experiment that compares the effect of registration jitter on the following two pointing techniques: (1) screen-centered crosshair pointing; and (2) relative pointing with a cursor bound to the physical object’s frame of reference and controlled by indirect relative touch strokes on the screen. The experiment considered both tablet and smartphone form factors. Results indicate that relative pointing in the frame of the physical object is less error prone and is less subject to registration jitter than screen-centered crosshair pointing.", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" }, "2": { "first_name": "Laurence", "last_name": "Nigay" }, "3": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/VNK13c/", "pages": "1-6", "bibtype": "inproceedings", "id": 649, "abbr": "VNK13c", "address": "Bordeaux, France", "date": "2013-11-13", "document": "http://iihm.imag.fr/publs/2013/IHM13_HARPointing.pdf", "type": "Conférences nationales avec comité de lecture sur texte complet", "booktitle": "Actes de la 25ème conférence francophone sur l'Interaction Homme-Machine (IHM 2013)" }, { "lang": "fr", "type_publi": "colloque", "title": "Relations spatiales en Réalité Augmentée sur dispositifs mobiles", "url": "http://ubimob2013.sciencesconf.org/19693", "abstract": "Les dispositifs mobiles étant de plus en plus puissants et intégrant de nombreux capteurs, il est maintenant possible de superposer des images numériques à la vue du monde physique retournée par la caméra. Le terme Réalité Augmentée est désormais couramment utilisé et cette technique est employée dans de nombreux domaines. Dans ce contexte, cet article étudie les relations spatiales mises en jeu lors de l'interaction avec cet environnement mixte composé de la vue du monde physique augmentée d'éléments numériques et affiché sur l'écran du dispositif mobile. Nous nous intéressons en particulier à deux relations spatiales : l'une entre l'objet physique et le dispositif mobile et l'autre entre le dispositif mobile et l'utilisateur. 
Nous présentons des exemples d'application de Réalité Augmentée sur dispositifs mobiles qui exploitent ces relations spatiales pour l'interaction.", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" }, "2": { "first_name": "Sébastien", "last_name": "Pelurson" }, "3": { "first_name": "Valentin", "last_name": "Regazzoni" }, "4": { "first_name": "Takeshi", "last_name": "Kurata" }, "5": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/VPR+13a/", "id": 632, "bibtype": "inproceedings", "abbr": "VPR+13a", "address": "Nancy, France", "date": "2013-06-05", "document": "http://iihm.imag.fr/publs/2013/Ubimob13_Final.pdf", "type": "Autres conférences et colloques avec actes", "booktitle": "Actes des 9èmes journées francophones Mobilité et Ubiquité (UbiMob 2013)" }, { "lang": "en", "publisher": "IEEE", "doi": "http://dx.doi.org/10.1109/ISMAR.2013.6671801", "bibtype": "inproceedings", "title": "Photo-shoot localization of a mobile camera based on registered frame data of virtualized reality models", "url": "http://amie.imag.fr/Main/Publications?action=download&upname=ISMAR2013_makita.pdf", "abstract": "This paper presents a study of a method for estimating the position and orientation of a photo-shoot in indoor environments for augmented reality applications. Our proposed localization method is based on registered frame data of virtualized reality models, which are photos with known photo-shoot positions and orientations, and depth data. Because registered frame data are a secondary product of the modeling process, no additional work is necessary to create them specifically for localization. In the method, a photo taken by a mobile camera is compared to registered frame data for localization. Since registered frame data are linked with photo-shoot position, orientation, and depth data, the 3D coordinates of each pixel on the photo of the registered frame data are available. We conducted experiments employing five estimation techniques for comparative evaluation.", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Jun", "last_name": "Nishida" }, "3": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "6": { "first_name": "Thomas", "last_name": "Vincent" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Jun", "last_name": "Yamashita" }, "9": { "first_name": "Hideaki", "last_name": "Kuzuoka" }, "10": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/MNI+13a/", "pages": "273-274", "note": "Poster", "id": 668, "abbr": "MNI+13a", "address": "Adelaide, SA, Australia", "date": "2013-10-01", "type": "Autres conférences et colloques avec actes", "booktitle": "Proceedings of the 2013 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "type_publi": "colloque" }, { "lang": "en", "type_publi": "autre", "title": "Handheld Augmented Reality: Spatial Relationships and Frames of Reference", "url": "http://studierstube.icg.tugraz.at/mobilehci2013workshop/", "abstract": "Handheld Augmented Reality (AR), one type of mobile AR, relies on the spatial coupling between the real world and the on-screen augmented reality outputs. 
Such spatial coupling introduces new design challenges for interaction and in particular for precise pointing techniques that go beyond mobile interaction design challenges (e.g., screen real estate, fat-finger problem with direct touch on the screen). Focusing on the design of touch interaction techniques for handheld AR, we propose two conceptual design elements, namely spatial relationship and frame of reference, as part of a conceptual framework organized according to four main entities: (1) the control space, (2) the digital augmentation, (3) the representation of the physical world and (4) the physical world.", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" }, "2": { "first_name": "Laurence", "last_name": "Nigay" }, "3": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/VNK13b/", "id": 652, "bibtype": "unpublished", "abbr": "VNK13b", "address": "Munich, Germany", "date": "2013-08-27", "type": "Autres publications", "booktitle": "Workshop on Designing Mobile Augmented Reality at MobileHCI 2013" }, { "lang": "en", "type_publi": "autre", "title": "Photo-shoot localization for mobile AR based on registered images in virtualized reality models", "url": "http://amie.imag.fr/Main/Publications?action=download&upname=KJMR2013_abstract_nishida.pdf", "abstract": "This paper presents a study on estimating photo-shoot location and orientation in indoor environments for augmented reality applications. The proposed method is based on registered images in virtualized reality models [1]. Registered images are secondary products of model creation, and contain photo-shoot location, orientation, and depth information. Therefore, additional data for special purposes need not be created. The upper and lower left parts of Figure 1 show the system architecture and data flow. We assume that the proposed method is used in combination with a relative positioning system, for example, Pedestrian Dead Reckoning (PDR). In the proposed system, a real image taken by a mobile device is sent to a server, and the image is compared with the registered images.\r\nIn this study, we compare the localization performance of the proposed system using mutual information, edge information, SURF, ORB, and FREAK features.", "authors": { "1": { "first_name": "Jun", "last_name": "Nishida" }, "2": { "first_name": "Koji", "last_name": "Makita" }, "3": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "6": { "first_name": "Thomas", "last_name": "Vincent" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Jun", "last_name": "Yamashita" }, "9": { "first_name": "Hideaki", "last_name": "Kuzuoka" }, "10": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/NMI+13a/", "id": 669, "bibtype": "unpublished", "abbr": "NMI+13a", "address": "Okinawa, Japan", "date": "2013-04-12", "type": "Autres publications", "booktitle": "Workshop KJMR 2013, The 6th Korea-Japan Workshop on Mixed Reality" }, { "lang": "en", "type_publi": "autre", "title": "Classifying handheld Augmented Reality: Three categories linked by spatial mappings", "abstract": "Handheld Augmented Reality (AR) relies on a spatial coupling of the on-screen content with the physical surrounding. 
To support the design of such systems and to classify existing AR systems, we present a framework made of three categories and two spatial relationships. Our framework highlights the spatial relationships between the physical world, the representation of the physical world on screen and the augmentation on screen. Within this framework, we study the relaxation of the spatial coupling between the digital information and the physical surrounding in order to enhance interaction by breaking the constraints of physical-world interaction.", "address": "Atlanta, USA", "year": 2012, "uri": "http://iihm.imag.fr/publication/VNK12a/", "id": 610, "bibtype": "unpublished", "abbr": "VNK12a", "authors": { "1": { "first_name": "Thomas", "last_name": "Vincent" }, "2": { "first_name": "Laurence", "last_name": "Nigay" }, "3": { "first_name": "Takeshi", "last_name": "Kurata" } }, "date": "2012-11-05", "document": "http://iihm.imag.fr/publs/2012/workshop_classifAR_final.pdf", "type": "Autres publications", "booktitle": "Workshop on Classifying the AR Presentation Space at ISMAR '12" }, { "lang": "en", "type_publi": "autre", "title": "Handheld AR/AV system using PDR localization and image based localization with virtualized reality models", "url": "http://ismar.vgtc.org/ismar/2012/info/overview/demos", "abstract": "Our demo will show a handheld AR/AV (Augmented Virtuality) system for indoor navigation to destinations and for displaying detailed instructions about target objects with contextual interaction. The localization method of the system is based on two crucial functions: PDR (Pedestrian Dead Reckoning) localization and image-based localization. The main feature of the demo is the complementary use of PDR and the image-based method with virtualized reality models. PDR is realized with the built-in sensors (3-axis accelerometers, gyroscopes and magnetometers) of a waist-mounted device for estimating position and direction on a 2D map. The accuracy of the PDR localization is improved with map matching and image-based localization. Maps of the environment for map matching are automatically created with virtualized reality models. Image-based localization is realized with a matching phase and a tracking phase for estimating 6-DoF (degrees of freedom) extrinsic camera parameters. In the matching phase, correspondences between reference images included in the virtualized reality models and images from the camera of the handheld device are used. The output of the PDR localization is used for efficient searching of reference images. 
In the tracking phase, interest-point tracking on images from the camera is used for relative motion estimation.", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "3": { "first_name": "Thomas", "last_name": "Vincent" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Jun", "last_name": "Nishida" }, "6": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2012, "uri": "http://iihm.imag.fr/publication/MKV+12a/", "id": 611, "bibtype": "unpublished", "abbr": "MKV+12a", "address": "Atlanta, USA", "date": "2012-11-07", "type": "Autres publications", "booktitle": "Demonstration, Conference ISMAR 2012, 11th IEEE International Symposium on Mixed and Augmented Reality, Atlanta, Georgia, USA, November 5-8, 2012" }, { "lang": "en", "type_publi": "autre", "title": "A benchmark data generation tool using walking simulation and virtualized reality models for evaluating AR visual tracking", "url": "http://amie.imag.fr/Main/Publications?action=download&upname=KJMR2012_abstract_makita.pdf", "abstract": "This paper describes a tool to generate benchmark data sets using walking simulation and virtualized reality models for evaluating visual tracking methods for augmented reality (AR).", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "3": { "first_name": "Takashi", "last_name": "Okuma" }, "4": { "first_name": "Thomas", "last_name": "Vincent" }, "5": { "first_name": "Laurence", "last_name": "Nigay" }, "6": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2012, "uri": "http://iihm.imag.fr/publication/MIO+12a/", "id": 670, "bibtype": "unpublished", "abbr": "MIO+12a", "address": "Seoul, Korea", "date": "2012-04-14", "type": "Autres publications", "booktitle": "Workshop KJMR 2012, The 5th Korea-Japan Workshop on Mixed Reality" }, { "lang": "en", "type_publi": "colcomlec", "doi": "http://dx.doi.org/10.1145/2044354.2044362", "title": "TouchOver: Decoupling Positioning from Selection on Touch-based Handheld Devices", "abstract": "When compared to conventional desktop mouse input, touch input on handheld devices suffers from the lack of a key feature: a mouseover state that can provide users with dynamic, proactive information. In addition, with touch screens, selection precision is limited by undesired extra finger tracking during finger press and lift movements.\r\n\r\nWe propose TouchOver, a multi-modal input technique for accelerometer-enabled touch-screen handheld devices, where positioning is performed with a finger on the touch surface, while selection is triggered by a gentle \"tilt forward\" of the device. By doing so, TouchOver adds a mouseover-like state and improves selection precision while remaining compatible with existing interaction techniques, such as Shift, devised to improve precision. 
Our formal user study shows a significant precision improvement over two other selection techniques, as well as a good tradeoff between speed and accuracy.", "authors": { "1": { "first_name": "Adriano", "last_name": "Scoditti" }, "2": { "first_name": "Thomas", "last_name": "Vincent" }, "3": { "first_name": "Joëlle", "last_name": "Coutaz" }, "4": { "first_name": "Renaud", "last_name": "Blanch" }, "5": { "first_name": "Nadine", "last_name": "Mandran" } }, "year": 2011, "uri": "http://iihm.imag.fr/publication/SVC+11a/", "pages": "37-40", "bibtype": "inproceedings", "id": 574, "abbr": "SVC+11a", "address": "Nice, France", "date": "2011-10-25", "document": "http://iihm.imag.fr/publs/2011/touchover_ihm2011.pdf", "type": "Conférences nationales avec comité de lecture sur texte complet", "booktitle": "Actes de la 23ème conférence francophone sur l'Interaction Homme-Machine (IHM 2011)" }, { "lang": "en", "type_publi": "autre", "title": "Handheld AR/AV indoor navigation and detailed information with contextual interaction", "url": "http://ismar2011.vgtc.org/demos.html", "abstract": "The demonstration shows a handheld system for indoor navigation to a specific exhibit item, followed by detailed information about the exhibit with contextual AR/AV interaction. The system provides the following four key functions: \r\n\r\n(1) Indoor navigation based on a PDR (Pedestrian Dead Reckoning) localization method combined with map matching using the built-in sensors (3-axis accelerometers, gyroscopes and magnetometers) of the waist-mounted device.\r\n\r\n(2) Coarse estimation of location and orientation by establishing correspondences between Virtualized-Reality (VR) models of environments and images from the handheld device camera.\r\n\r\n(3) Fine estimation of location and attitude of the handheld device based on visual AR tracking methods.\r\n\r\n(4) Contextual AR/AV (Augmented Virtuality) interaction widgets (e.g., buttons and menus) that provide detailed information about the exhibit. Widgets are contextual according to the position of the user relative to the exhibit.\r\n\r\nAny participant can experience the AR/AV system by being directed to search for a target exhibit to obtain further detailed information about the exhibit.\r\n\r\nWhat makes our demonstration unique is the integration of indoor navigation capabilities with interactive AR/AV functionalities for augmenting an exhibit.", "year": 2011, "uri": "http://iihm.imag.fr/publication/KMV+11a/", "id": 584, "bibtype": "unpublished", "abbr": "KMV+11a", "authors": { "1": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "2": { "first_name": "Koji", "last_name": "Makita" }, "3": { "first_name": "Thomas", "last_name": "Vincent" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Jun", "last_name": "Nishida" }, "6": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Takeshi", "last_name": "Kurata" } }, "date": "2011-10-26", "type": "Autres publications", "booktitle": "Demonstration, Conference ISMAR 2011, 10th IEEE International Symposium on Mixed and Augmented Reality, Switzerland, October 26-29, 2011" }]);
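// A minimal consumption sketch (an assumption, not part of the original IIHM data export):
// this file is a JSONP-style payload, so a consuming page would normally define a
// `publications` callback before loading it. The helper below is hypothetical and only
// illustrates the record schema used above ("title", "authors", "year", "booktitle",
// "pages", "uri", ...); the real site may render these fields differently.
function renderPublications(entries) {
  entries.forEach(function (entry) {
    // The author map uses string keys "1", "2", ...; sort them numerically to keep order.
    var authors = Object.keys(entry.authors || {})
      .sort(function (a, b) { return Number(a) - Number(b); })
      .map(function (key) {
        var author = entry.authors[key];
        return author.first_name + " " + author.last_name;
      })
      .join(", ");
    // Build a minimal citation line; optional fields ("booktitle", "pages") may be absent.
    var citation = authors + " (" + entry.year + "). " + entry.title +
      (entry.booktitle ? ". " + entry.booktitle : "") +
      (entry.pages ? ", pp. " + entry.pages : "") + ".";
    console.log(citation, entry.uri);
  });
}
// Example (hypothetical): a page could alias the JSONP callback to this helper before
// loading the data file, e.g. `window.publications = renderPublications;`.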