publications([{ "lang": "en", "publisher": "Springer", "doi": "http://dx.doi.org/10.1007/978-4-431-55861-3_8", "title": "Mixed Reality Navigation on a Tablet Computer for Supporting Machine Maintenance in Wide-area Indoor Environment", "abstract": "This paper describes a maintenance service support system for wide-area indoor environment, such as a factory and a hospital. In maintenance services, operators often have to check a map to find out a way to a target machine, and also have to refer documents to get information about check-up and repair of the machine. In order to reduce working load of operators, information technology can help operators carry out additional but important operations during maintenance, such as referring documents and maps, recording maintenance logs and so on. In this paper, we propose mixed reality navigation on a tablet computer composed of augmented virtuality mode and augmented reality mode. Augmented virtuality mode performs map-based navigation shows positions of the user and the target machine. Augmented reality mode performs intuitive visualization of information about the machine by overlaying annotations on camera images. The proposed system is based on a hybrid localization technique realized with pedestrian dead reckoning (PDR) and 3D model-based image processing for the purpose of covering wide-area indoor environment. Experimental results using our prototype with a mock-up model of a machine are also described for showing feasibility of our concept in the paper. 
", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Thomas", "last_name": "Vincent" }, "3": { "first_name": "Soichi", "last_name": "Ebisuno" }, "4": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "5": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "6": { "first_name": "Takashi", "last_name": "Okuma" }, "7": { "first_name": "Minoru", "last_name": "Yoshida" }, "8": { "first_name": "Laurence", "last_name": "Nigay" }, "9": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2014, "uri": "http://iihm.imag.fr/publication/MVE+14a/", "pages": "41-47", "bibtype": "inproceedings", "id": 705, "abbr": "MVE+14a", "address": "Yokohama, Japan", "date": "2014-09-14", "document": "http://iihm.imag.fr/publs/2014/ICServ2014.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Conference Proceedings of ICServ2014, the 2nd International Conference on Serviceology", "type_publi": "icolcomlec" }, { "lang": "en", "publisher": "IEEE", "doi": "http://dx.doi.org/10.1109/ISMAR.2013.6671801", "bibtype": "inproceedings", "title": "Photo-shoot localization of a mobile camera based on registered frame data of virtualized reality models", "url": "http://amie.imag.fr/Main/Publications?action=download&upname=ISMAR2013_makita.pdf", "abstract": "This paper presents a study of a method for estimating the position and orientation of a photo-shoot in indoor environments for augmented reality applications. Our proposed localization method is based on registered frame data of virtualized reality models, which are photos with known photo-shoot positions and orientations, and depth data. Because registered frame data are secondary product of modeling process, additional works are not necessary to create registered frame data especially for the localization. 
Since registered frame data are linked with photo-shoot position, orientation, and depth data, 3D coordinates of each pixel on the photo of registered frame data is available. We conducted experiments with employing five techniques of the estimation for comparative evaluations.", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Jun", "last_name": "Nishida" }, "3": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "6": { "first_name": "Thomas", "last_name": "Vincent" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Jun", "last_name": "Yamashita" }, "9": { "first_name": "Hideaki", "last_name": "Kuzuoka" }, "10": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/MNI+13a/", "pages": "273-274", "note": "Poster", "id": 668, "abbr": "MNI+13a", "address": "Adelaide, SA, Australia", "date": "2013-10-01", "type": "Autres conférences et colloques avec actes", "booktitle": "Proceedings of the 2013 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "type_publi": "colloque" }, { "lang": "en", "type_publi": "autre", "title": "Photo-shoot localization for mobile AR based on registered images in virtualized reality models", "url": "http://amie.imag.fr/Main/Publications?action=download&upname=KJMR2013_abstract_nishida.pdf", "abstract": "This paper presents a study on estimating photo-shoot location and orientation in indoor environments for augmented reality applications. The proposed method is based on registered images in virtualized reality models [1]. Registered images are secondary products of model creation, and contain photo-shoot location, orientation, and depth information. Therefore, additional data for special purposes need not be created. 
The upper and lower left parts of Figure 1 show the system architecture and data flow. We assume that the proposed method is used in combination with a relative positioning system, for example, Pedestrian Dead Reckoning (PDR). In the proposed system, a real image taken by a mobile device is sent to a server, and the image is compared with the registered images.\r\nIn this study, we compare the localization performance of the proposed system using mutual information, edge information, SURF, ORB, and FREAK features.", "authors": { "1": { "first_name": "Jun", "last_name": "Nishida" }, "2": { "first_name": "Koji", "last_name": "Makita" }, "3": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "6": { "first_name": "Thomas", "last_name": "Vincent" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Jun", "last_name": "Yamashita" }, "9": { "first_name": "Hideaki", "last_name": "Kuzuoka" }, "10": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2013, "uri": "http://iihm.imag.fr/publication/NMI+13a/", "id": 669, "bibtype": "unpublished", "abbr": "NMI+13a", "address": "Okinawa, Japon", "date": "2013-04-12", "type": "Autres publications", "booktitle": "Workshop KJMR 2013, The 6th Korea-Japan Workshop on Mixed Reality" }, { "lang": "en", "type_publi": "autre", "title": "Handheld AR/AV system using PDR localization and image based localization with virtualized reality models", "url": "http://ismar.vgtc.org/ismar/2012/info/overview/demos", "abstract": "Our demo will show a handheld AR/AV (Augmented Virtuality) system for indoor navigation to destinations and displaying detailed instructions of target objects with contextual interaction. A localization method of the system is based on two crucial functions, PDR (Pedestrian Dead Reckoning) localization, and image based localization. 
The main feature of the demo is a complementary use of PDR and image based method with virtualized reality models. PDR is realized with the built-in sensors (3-axis accelerometers, gyroscopes and magnetometers) in waist-mounted device for estimating position and direction on 2D map. An accuracy of the PDR localization is improved with map matching and image based localization. Maps of the environment for map matching are automatically created with virtualized reality models. Image based localization is realized with matching phase and tracking phase for estimating 6-DoF (degree of freedom) extrinsic camera parameters. In matching phase, correspondence between reference images included in virtualized reality models and images from the camera of the handheld device is used. An output of the PDR localization is used for an efficient searching of reference images. In tracking phase, interest point-tracking on images from the camera is used for relative motion estimation.", "authors": { "1": { "first_name": "Koji", "last_name": "Makita" }, "2": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "3": { "first_name": "Thomas", "last_name": "Vincent" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Jun", "last_name": "Nishida" }, "6": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Takeshi", "last_name": "Kurata" } }, "year": 2012, "uri": "http://iihm.imag.fr/publication/MKV+12a/", "id": 611, "bibtype": "unpublished", "abbr": "MKV+12a", "address": "Atlanta, USA", "date": "2012-11-07", "type": "Autres publications", "booktitle": "Demonstration, Conference ISMAR 2012, 11th IEEE International Symposium on Mixed and Augmented Reality, Atlanta, Georgia, USA, November 5-8, 2012" }, { "lang": "en", "type_publi": "autre", "title": "Handheld AR/AV indoor navigation and detailed information with contextual interaction", "url": 
"http://ismar2011.vgtc.org/demos.html", "abstract": "The demonstration shows a handheld system for indoor navigation to a specific exhibit item followed by detailed information about the exhibit with contextual AR/AV interaction. The system provides the following four key functions: \r\n\r\n(1) Indoor navigation based on a PDR (Pedestrian Dead Reckoning) localization method combined with map matching using the built-in sensors (3-axis accelerometers, gyroscopes and magnetometers) of the waist mounted device.\r\n\r\n(2) Coarse estimation of location and orientation by making correspondence between Virtualized-Reality (VR) models of environments and images from the handheld device camera.\r\n\r\n(3) Fine estimation of location and attitude of the handheld device based on visual AR tracking methods.\r\n\r\n(4) Contextual AR/AV (Augmented Virtuality) interaction widgets (e.g., buttons and menus) that provide detailed information about the exhibit. Widgets are contextual according to the relative position of the user to the exhibit.\r\n\r\nAny participant can experience the AR/AV system, by being directed to search for a target exhibit to obtain further detailed information about the exhibit.\r\n\r\nWhat makes our demonstration unique is the integration of indoor navigation capabilities with interactive AR/AV functionalities for augmenting an exhibit.", "year": 2011, "uri": "http://iihm.imag.fr/publication/KMV+11a/", "id": 584, "bibtype": "unpublished", "abbr": "KMV+11a", "authors": { "1": { "first_name": "Masakatsu", "last_name": "Kourogi" }, "2": { "first_name": "Koji", "last_name": "Makita" }, "3": { "first_name": "Thomas", "last_name": "Vincent" }, "4": { "first_name": "Takashi", "last_name": "Okuma" }, "5": { "first_name": "Jun", "last_name": "Nishida" }, "6": { "first_name": "Tomoya", "last_name": "Ishikawa" }, "7": { "first_name": "Laurence", "last_name": "Nigay" }, "8": { "first_name": "Takeshi", "last_name": "Kurata" } }, "date": "2011-10-26", "type": "Autres 
publications", "booktitle": "Demonstration, Conference ISMAR 2011, 10th IEEE International Symposium on Mixed and Augmented Reality, Switzerland, October 26-29, 2011" }]);