@inproceedings{oai:sucra.repo.nii.ac.jp:00009389,
  author    = {Kuzuoka, Hideaki and Yamashita, Jun and Yamazaki, Keiichi and Yukioka, Tetsuo and Ohta, Shoichi and Luff, Paul and Kuno, Yoshinori},
  title     = {Working hands: Embodying interaction for healthcare},
  booktitle = {Ninth European Conference on Computer-Supported Cooperative Work (ECSCW)},
  publisher = {Springer},
  year      = {2005},
  note      = {This paper reports on two technologies we have developed to support rich interaction between distributed personnel. Building on our previous investigations with remote projections and pointing devices, we have begun to investigate how we might facilitate particularly complex interactions, namely remotely instructing co-participants in emergency situations. We introduce two systems, GestureView and GestureLaser, that provide different ways of embodying actions in a remote location. Reprinted from the research results report (March 2008) of the Ministry of Internal Affairs and Communications Strategic Information and Communications R\&D Promotion Programme (SCOPE), priority-area research and development in next-generation human interface and content technology, ``Face-to-face and network communication between humans and robots based on visual information'' (051303007), FY2005--FY2007, principal investigator Yoshinori Kuno (Professor, Graduate School of Science and Engineering, Saitama University).},
  yomi      = {Kuno, Yoshinori}
}