% % This file was created by the Typo3 extension % sevenpack version 0.7.14 % % --- Timezone: CEST % Creation date: 2017-05-23 % Creation time: 12-51-34 % --- Number of references % 16 % @Article { CaniardBT2014, title = {Action can amplify motion-induced illusory displacement}, journal = {Frontiers in Human Neuroscience}, year = {2015}, month = {1}, volume = {8}, number = {1058}, pages = {1-14}, abstract = {Local motion is known to produce strong illusory displacement in the perceived position of globally static objects. For example, if a dot-cloud or grating drifts to the left within a stationary aperture, the perceived position of the whole aperture will also be shifted to the left. Previously, we used a simple tracking task to demonstrate that active control over the global position of an object did not eliminate this form of illusion. Here, we used a new iPad task to directly compare the magnitude of illusory displacement under active and passive conditions. In the active condition, participants guided a drifting Gabor patch along a virtual slalom course by using the tilt control of an iPad. The task was to position the patch so that it entered each gate at the direct center, and we used the left/right deviations from that point as our dependent measure. In the passive condition, participants watched playback of standardized trajectories along the same course. We systematically varied deviation from midpoint at gate entry, and participants made 2AFC left/right judgments. We fitted cumulative normal functions to individual distributions and extracted the PSE as our dependent measure. To our surprise, the magnitude of displacement was consistently larger under active than under passive conditions. Importantly, control conditions ruled out the possibility that such amplification results from lack of motor control or differences in global trajectories as performance estimates were equivalent in the two conditions in the absence of local motion. 
Our results suggest that the illusion penetrates multiple levels of the perception-action cycle, indicating that one important direction for the future of perceptual illusions may be to more fully explore their influence during active vision.}, department = {Department B{\"u}lthoff}, web_url = {http://journal.frontiersin.org/Journal/10.3389/fnhum.2014.01058/pdf}, DOI = {10.3389/fnhum.2014.01058}, author = {Caniard, F and B{\"u}lthoff, HH and Thornton, IM} } @Inproceedings { CaniardBMLT2011, title = {Active control does not eliminate motion-induced illusory displacement}, year = {2011}, month = {8}, pages = {101-108}, abstract = {When the sine-wave grating of a Gabor patch drifts to the left or right, the perceived position of the entire object is shifted in the direction of local motion. In the current paper, we explored whether active control of the physical position of the patch can overcome such motion induced illusory displacement. We created a simple computer game and asked participants to continuously guide a Gabor patch along a randomly curving path. When the grating inside the Gabor patch was stationary, participants could perform this task without error. When the grating drifted to either left or right, we observed systematic errors consistent with previous reports of motion-induced illusory displacement. Specifically, when the grating drifted to the right, participants adjusted the global position of the patch to the left of the target line, and when it drifted to the left, errors were to the right of the line. The magnitude of the errors was consistent with previously reported perceptual judgements for centrally presented items, and scaled systematically with the speed of local drift. Importantly, we found no evidence that participants could adapt or compensate for illusory displacement given active control of the target. 
The current findings could have important implications for interface design, suggesting that local dynamic components of a display could affect perception and action within the more global application environment.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/2011/APGV-2011-caniard.pdf}, department = {Department B{\"u}lthoff}, web_url = {http://www.apgv.org/archive/apgv11/}, publisher = {ACM Press}, address = {New York, NY, USA}, event_place = {Toulouse, France}, event_name = {8th Symposium on Applied Perception in Graphics and Visualization (APGV 2011)}, ISBN = {978-1-4503-0889-2}, DOI = {10.1145/2077451.2077470}, author = {Caniard, F and B{\"u}lthoff, HH and Mamassian, P and Lee, S-W and Thornton, IM} } @Inproceedings { 4651, title = {Distortion in 3D shape estimation with changes in illumination}, year = {2007}, month = {7}, pages = {99-105}, abstract = {In many domains it is very important that observers form an accurate percept of 3-dimensional structure from 2-dimensional images of scenes or objects. This is particularly relevant for designers who need to make decisions concerning the refinement of novel objects that haven't been physically built yet. This study presents the results of two experiments whose goal was to test the effect of lighting direction on the shape perception of smooth surfaces using shading and lighting techniques commonly used in modeling and design software. The first experiment consisted of a 2 alternate forced choice task which compared the effect of the amount of shape difference between smooth surfaces lit by a single point light with whether the position of the light sources were the same or different for each surface. Results show that, as the difference between the shapes decreased, participants were more and more biased towards choosing the match shape lit by the same source as the test shape. 
In the second experiment, participants had to report the orientation at equivalent probe locations on pairs of smooth surfaces presented simultaneously, using gauge figures. The surfaces could either be the same or slightly different and the light source of each shape could either be at the same relative location or offset by 90\(^{\circ}\) horizontally. Participants reported large differences in surface orientation when the lighting condition was different, even when the shapes were the same, confirming the first results. Our findings show that lighting conditions can have a strong effect on 3-dimensional perception, and suggest that great care should be taken when projection systems are used for 3D visualisation where an accurate representation is required, either by carefully choosing lighting conditions or by using more realistic rendering techniques.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/apgv07-99_[0].pdf}, department = {Department B{\"u}lthoff}, web_url = {http://www.apgv.de/}, editor = {Wallraven, C. and Sundstedt, V.}, publisher = {ACM Press}, address = {New York, NY, USA}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {T{\"u}bingen, Germany}, event_name = {4th Symposium on Applied Perception in Graphics and Visualization (APGV 2007)}, language = {en}, ISBN = {978-1-59593-670-7}, DOI = {10.1145/1272582.1272602}, author = {Caniard, F and Fleming, RW} } @Inproceedings { 3466, title = {Influence of Auditory Cues on the visually-induced Self-Motion Illusion (Circular Vection) in Virtual Reality}, year = {2005}, month = {9}, pages = {49-57}, abstract = {This study investigated whether the visually induced self-motion illusion (``circular vection'') can be enhanced by adding a matching auditory cue (the sound of a fountain that is also visible in the visual stimulus). 
Twenty observers viewed rotating photorealistic pictures of a market place projected onto a curved projection screen (FOV: 54\(^{\circ}\)x45\(^{\circ}\)). Three conditions were randomized in a repeated measures within-subject design: No sound, mono sound, and spatialized sound using a generic head-related transfer function (HRTF). Adding mono sound increased convincingness ratings marginally, but did not affect any of the other measures of vection or presence. Spatializing the fountain sound, however, improved vection (convincingness and vection buildup time) and presence ratings significantly. Note that facilitation was found even though the visual stimulus was of high quality and realism, and known to be a powerful vection-inducing stimulus. Thus, HRTF-based auralization using headphones can be employed to improve visual VR simulations both in terms of self-motion perception and overall presence.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/Riecke__05_paper4Presence2005_Influence_of_Auditory_Cues_on_the_visually-induced_Self-Motion_Illusion_-Circular_Vection-_in_Virtual_Reality_3466[0].pdf}, department = {Department B{\"u}lthoff}, web_url = {http://astro.temple.edu/\verb=~=lombard/ISPR/Proceedings/2005/Riecke,\%20Schulte-Pelkum,\%20Caniard,\%20Bulthoff.pdf}, editor = {Slater, M.}, publisher = {University College London}, address = {London, UK}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {London, UK}, event_name = {8th Annual International Workshop on Presence (PRESENCE 2005)}, language = {en}, ISBN = {0-9551232-0-8}, author = {Riecke, BE and Schulte-Pelkum, J and Caniard, F and B{\"u}lthoff, HH} } @Inproceedings { 2904, title = {Towards Lean and Elegant Self-Motion Simulation in Virtual Reality}, year = {2005}, month = {3}, pages = {131-138}, abstract = {Despite recent technological advances, convincing self-motion simulation in virtual reality (VR) is difficult to achieve, 
and users often suffer from motion sickness and/or disorientation in the simulated world. Instead of trying to simulate self-motions with physical realism (as is often done for, e.g., driving or flight simulators), we propose in this paper a perceptually oriented approach towards self-motion simulation. Following this paradigm, we performed a series of psychophysical experiments to determine essential visual, auditory, and vestibular/tactile parameters for an effective and perceptually convincing self-motion simulation. These studies are a first step towards our overall goal of achieving lean and elegant self-motion simulation in virtual reality (VR) without physically moving the observer. In a series of psychophysical experiments about the self-motion illusion (circular vection), we found that (i) vection as well as presence in the simulated environment is increased by a consistent, naturalistic visual scene when compared to a sliced, inconsistent version of the identical scene, (ii) barely noticeable marks on the projection screen can increase vection as well as presence in an unobtrusive manner, (iii) physical vibrations of the observer's seat can enhance the vection illusion, and (iv) spatialized 3D audio cues embedded in the simulated environment increase the sensation of self-motion and presence. We conclude that providing consistent cues about self-motion to multiple sensory modalities can enhance vection, even if physical motion cues are absent. 
These results yield important implications for the design of lean and elegant self-motion simulators.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/pdf2904.pdf}, department = {Department B{\"u}lthoff}, web_url = {http://www.vr2005.org}, editor = {Fr{\"o}hlich, B.}, publisher = {IEEE Computer Society}, address = {Piscataway, NJ, USA}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {Bonn, Germany}, event_name = {IEEE Conference on Virtual Reality (VR '05)}, ISBN = {0-7803-8929-8}, DOI = {10.1109/VR.2005.83}, author = {Riecke, BE and Schulte-Pelkum, J and Caniard, F and B{\"u}lthoff, HH} } @Techreport { 4187, title = {Spatialized auditory cues enhance the visually-induced self-motion illusion (circular vection) in Virtual Reality}, year = {2005}, month = {10}, number = {138}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/Riecke_05_TR-138__Spatialized\%20auditory\%20cues\%20enhance\%20the\%20visually-induced\%20self-motion\%20illusion\%20(circular\%20vection)\%20in\%20Virtual\%20Reality_[0].pdf}, department = {Department B{\"u}lthoff}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, institution = {Max Planck Institute for Biological Cybernetics, T{\"u}bingen, Germany}, language = {en}, digital = {1}, author = {Riecke, BE and Schulte-Pelkum, J and Caniard, F and B{\"u}lthoff, HH} } @Poster { 4883, title = {Lighting Direction Affects Perceived Shape from Shading}, year = {2007}, month = {7}, volume = {10}, pages = {110}, abstract = {It has been known for a long time that many cues contribute to the perception of 3D shape from 2D images, such as shape from shading, textures, occlusions or reflection of the surrounding environment. However, little is known about the influence of lighting conditions on the correct mental reconstruction of 3D shapes. 
In order to investigate this, we have run a set of experiments asking participants to report differences in surface orientation of unknown, smooth surfaces, using different methods. The first experiment consisted of a 2AFC in which subjects had to identify which of two test objects had the same shape as the target. The stimuli were computer generated irregularly-shaped smooth surfaces, illuminated by a single point light source. For both test stimuli, the position of the light sources could either be different from or the same as the target. Results show that, as the amount of shape difference became smaller, participants were more and more biased towards choosing the match shape lit by the same source as the target. In the second experiment, participants had to report the perceived orientation of the surfaces at various locations by adjusting gauge figures. The surfaces could either be the same or slightly different and the light source of each shape could either be the same or offset by 90 degrees horizontally. Participants' matches revealed large differences in perceived surface orientations when the lighting was different, even when the shapes were the same, confirming the first results. Our findings show that lighting conditions can play a substantial role in the perception of 3D structure of objects from their 2D representation. 
We also discuss the implication of this in the domain of computer aided visualisation.}, department = {Department B{\"u}lthoff}, web_url = {http://www.twk.tuebingen.mpg.de/twk07/abstract.php?_load_id=caniard01}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {T{\"u}bingen, Germany}, event_name = {10th T{\"u}binger Wahrnehmungskonferenz (TWK 2007)}, language = {en}, author = {Caniard, F and Fleming, RW} } @Poster { FriedrichCTCM2006, title = {Motion-induced localization bias in an action task}, year = {2006}, month = {4}, day = {4}, pages = {9}, abstract = {DeValois and DeValois (Vis Research, 31, 1619-1626) have shown that a moving carrier behind a stationary window can cause a perceptual misplacement of this envelope in direction of motion. The authors also found that the bias increased with increasing carrier speed and eccentricity. Yamagishi et al. (2001, Proceedings of the Royal Society, 268, 973-977) showed that this effect can also be found in visuo-motor tasks. To see whether variables such as eccentricity and grating speed increase the motion-induced perceptual shift of a motion field also in an action task, a motor-control experiment was created in which these variables were manipulated (eccentricity values: 0 deg, 8.4 deg and 16.8; speed values: 1.78 deg/sec, 4.45 deg/sec and 7.1 deg/sec). Participants had to keep a downward-sliding path aligned with a motion field (stationary Gaussian and horizontally moving carrier) by manipulating the path with a joystick. The perceptual bias can be measured by comparing the average difference between correct and actual path position. Both speed and eccentricity had a significant impact on the bias size. Similarly to the recognition task, the bias size increased with increasing carrier speed. Contrary to DeValois and DeValois' finding, here the perceptual shift decreased with increasing eccentricity. There was no interaction of the variables. 
If we assume an ecological reason for the existence of a motion-induced bias, it might be plausible to see why the bias is smaller in an unnatural task such as actively manipulating an object that is in an eccentric position in the visual field (hence the decrease of bias magnitude in the periphery). Contrary to this, recognition tasks carried out in the periphery of the visual field are far more common and therefore might ``benefit'' from the existence of a motion-induced localization bias. As expected, task difficulty increased with increasing speed and eccentricity. It seems interesting to further compare action and perception tasks in terms of factors influencing the localization bias in these different task types.}, department = {Department B{\"u}lthoff}, web_url = {http://www.theava.net/abstracts/ava2006.doc}, event_place = {Bradford, UK}, event_name = {AVA Annual Meeting 2006: Vision in Perception and Cognition}, author = {Friedrich, B and Caniard, F and Thornton, IM and Chatziastros, A and Mamassian, P} } @Poster { 3535, title = {Can auditory cues influence the visually induced self-motion illusion?}, journal = {Perception}, year = {2005}, month = {8}, volume = {34}, number = {ECVP Abstract Supplement}, pages = {82}, abstract = {It is well known that a moving visual stimulus covering a large part of the visual field can induce compelling illusions of self-motion ('vection'). Lackner (1977 Aviation Space and Environmental Medicine 48 129 - 131) showed that sound sources rotating around a blindfolded person can also induce vection. In the current study, we investigated visuo-auditory interactions for circular vection by testing whether adding an acoustic landmark that moves together with the visual stimulus enhances vection. Twenty observers viewed a photorealistic scene of a market place that was projected onto a curved projection screen (FOV 54 deg \(\times\) 40 deg). 
In each trial, the visual scene rotated at 30\(^{\circ}\) s-1 around the Earth's vertical axis. Three conditions were randomised in a within-subjects design: no-sound, mono-sound, and spatialised-sound (moving together with the visual scene) played through headphones using a generic head-related transfer function (HRTF). We used sounds of flowing water, which matched the visual depiction of a fountain that was visible in the market scene. Participants indicated vection onset by deflecting the joystick in the direction of perceived self-motion. The convincingness of the illusion was rated on an 11-point scale (0 - 100\%). Only the spatialised-sound that moved according to the visual stimulus increased vection significantly: convincingness ratings increased from 60.2\% for mono-sound to 69.6\% for spatialised-sound (t19 = -2.84, p = 0.01), and the latency from vection onset until saturated vection decreased from 12.5 s for mono-sound to 11.1 s for spatialised-sound (t19 = 2.69, p = 0.015). In addition, presence ratings assessed by the IPQ presence questionnaire were slightly but significantly increased. Average vection onset times, however, were not affected by the auditory stimuli. We conclude that spatialised-sound that moves concordantly with a matching visual stimulus can enhance vection. The effect size was, however, rather small (15\%). In a control experiment, we will investigate whether this might be explained by a ceiling effect, since visually induced vection was already quite strong. 
These results have important implications for our understanding of multi-modal cue integration during self-motion.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/poster_jsp_ECVP2005_4web_[0].pdf}, department = {Department B{\"u}lthoff}, web_url = {http://pec.sagepub.com/content/34/1_suppl.toc}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {A Coru{\~n}a, Spain}, event_name = {28th European Conference on Visual Perception}, language = {en}, DOI = {10.1177/03010066050340S101}, author = {Schulte-Pelkum, J and Riecke, BE and Caniard, F and B{\"u}lthoff, HH} } @Poster { FriedrichCTCM2005, title = {Motion-induced localisation bias in a motor control task}, journal = {Perception}, year = {2005}, month = {8}, volume = {34}, number = {ECVP Abstract Supplement}, pages = {123}, abstract = {A moving carrier behind a stationary envelope can cause a perceptual misplacement of this envelope in the direction of the motion (De Valois and De Valois, 1991 Vision Research 31 1619 - 1626). Yamagishi et al (2001 Proceedings of the Royal Society 268 973 - 977) showed that this effect can also be found in visuomotor localisation tasks. We created a motor task in which a vertically moving, curved path on a monitor had to be kept aligned with the centre of a Gabor (stationary Gaussian and moving carrier either horizontally or vertically). The seventeen participants controlled the horizontal position of the path with a joystick. According to previous findings, we expected that the motion of the carrier would elicit a misalignment between path and carrier, with a relative displacement in the direction of the motion of the carrier. We found such a bias. Speed, orientation, and eccentricity of the Gabor were manipulated. The bias was enhanced with increasing speed and the orientation determines the direction of the perceptual misplacement. 
In addition, large eccentricities created an asymmetry in the bias: the bias was greater for inward than outward motion. Implications of these findings for the general understanding of this bias are discussed.}, department = {Department B{\"u}lthoff}, web_url = {http://pec.sagepub.com/content/34/1_suppl.toc}, event_place = {A Coru{\~n}a, Spain}, event_name = {28th European Conference on Visual Perception}, DOI = {10.1177/03010066050340S101}, author = {Friedrich, B and Caniard, F and Thornton, IM and Chatziastros, A and Mamassian, P} } @Poster { 3232, title = {Auditory cues can facilitate the visually-induced self-motion illusion (circular vection) in Virtual Reality}, year = {2005}, month = {2}, volume = {8}, pages = {74}, abstract = {There is a long tradition of investigating the self-motion illusion induced by rotating visual stimuli (''circular vection''). Recently, Larsson et al. (2004)[1] showed that up to 50\% of participants could also get some vection from rotating sound sources while blindfolded, replicating findings from Lackner (1977)[2]. Compared to the compelling visual illusion, though, auditory vection is rather weak and much less convincing. Here, we tested whether adding an acoustic landmark to a rotating visual photorealistic stimulus of a natural scene can improve vection. Twenty observers viewed rotating stimuli that were projected onto a curved projection screen (FOV: 54\(^{\circ}\)x40.5\(^{\circ}\)). The visual scene rotated around the earth-vertical axis at 30\(^{\circ}\)/s. Three conditions were randomized in a repeated measures within-subject design: No-sound, mono-sound, and 3D-sound using a generic head-related transfer function (HRTF). Adding mono-sound showed only minimal tendencies towards increased vection and did not affect presence-ratings at all, as assessed using the Schubert et al. (2001) presence questionnaire [3]. 
Vection was, however, slightly but significantly improved by adding a rotating 3D-sound source that moved in accordance with the visual scene: Convincingness ratings increased from 60.2\% (mono-sound) to 69.6\% (3D-sound) (t(19)=-2.84, p=.01), and vection buildup-times decreased from 12.5s (mono-sound) to 11.1s (3D-sound) (t(19)=2.69, p=.015). Furthermore, overall presence ratings were increased slightly but significantly. Note that vection onset times were not significantly affected (9.6s vs. 9.9s, p>.05). We conclude that adding spatialized 3D-sound that moves concordantly with a visual self-motion simulation does not only increase overall presence, but also improves the self-motion sensation itself. The effect size for the vection measures was, however, rather small (about 15\%), which might be explained by a ceiling effect, as visually induced vection was already quite strong without the 3D-sound (9.9s vection onset time). Merely adding non-spatialized (mono) sound did not show any clear effects. 
These results have important implications for the understanding or multi-modal cue integration in general and self-motion simulations in Virtual Reality in particular.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/pdf3232.pdf}, department = {Department B{\"u}lthoff}, web_url = {http://www.twk.tuebingen.mpg.de/twk05/abstract.php?_load_id=riecke01}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {T{\"u}bingen, Germany}, event_name = {8th T{\"u}bingen Perception Conference (TWK 2005)}, language = {en}, author = {Riecke, BE and Schulte-Pelkum, J and Caniard, F and B{\"u}lthoff, HH} } @Poster { FriedrichCCMT2005, title = {Motion-Induced Shift and Navigation in Virtual Reality}, year = {2005}, month = {2}, volume = {8}, pages = {176}, abstract = {De Valois and De Valois [1] showed that moving Gabors (cosine gratings windowed by a stationary 2-dimensional Gaussian envelope) are locally misperceived in their direction of motion. In a pointing task, Yamagishi, Anderson and Ashida [2] reported even stronger visuo-motor localization error especially when participants had to make a speeded response. Here, we examined motion-induced bias in the context of an active navigation task, a situation in which perception and action are tightly coupled. Participants were presented with a birds-eye view of a vertically moving contour that simulated observer motion along a path. Observers centrally fixated while the path and a moving Gabor target were presented peripherally. The task was to follow the path with the moving Gabor, whose position (left/right) and direction(towards left/right) were varied in separate blocks. Gabor eccentricity was constant relative to fixation, with observers adjusting their simulated position with a joystick. Deviations from the path were analyzed as a function of Gabor direction. 
We found large and consistent misalignment in the direction of the moving Gabor, indicating that global position/motion judgments during action can be strongly affected by irrelevant local motion signals.}, department = {Department B{\"u}lthoff}, web_url = {http://www.twk.tuebingen.mpg.de/twk05/programm.php}, event_place = {T{\"u}bingen, Germany}, event_name = {8th T{\"u}bingen Perception Conference (TWK 2005)}, author = {Friedrich, B and Caniard, F and Chatziastros, A and Mamassian, P and Thornton, IM} } @Poster { 2840, title = {Motion-induced shift and navigation in virtual reality}, journal = {Perception}, year = {2004}, month = {9}, volume = {33}, number = {ECVP Abstract Supplement}, pages = {164-165}, abstract = {De Valois and De Valois (1991 Vision Research 31 1619-1626) showed that moving Gabors (cosine gratings windowed by a stationary 2-dimensional Gaussian envelope) are locally misperceived in their direction of motion. In a pointing task, Yamagishi, Anderson and Ashida (2001 Proceedings of the Royal Society 268 973-977) reported even stronger visuo-motor localization error especially when participants had to make a speeded response. Here, we examined motion-induced bias in the context of an active navigation task, a situation in which perception and action are tightly coupled. Participants were presented with a birds-eye view of a vertically moving contour that simulated observer motion along a path. Observers centrally fixated while the path and a moving Gabor target were presented peripherally. The task was to follow the path with the moving Gabor, whose position (left/right) and direction(towards left/right) were varied in separate blocks. Gabor eccentricity was constant relative to fixation, with observers adjusting their simulated position with a joystick. Deviations from the path were analyzed as a function of Gabor direction. 
We found large and consistent misalignment in the direction of the moving Gabor, indicating that global position/motion judgments during action can be strongly affected by irrelevant local motion signals.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/pdf2840.pdf}, department = {Department B{\"u}lthoff}, web_url = {http://pec.sagepub.com/content/33/1_suppl.toc}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {Budapest, Hungary}, event_name = {27th European Conference on Visual Perception}, DOI = {10.1068/ecvp04a}, author = {Friedrich, B and Caniard, F and Chatziastros, A and Mamassian, P and Thornton, IM} } @Conference { ThorntonCMB2011_2, title = {Active control does not eliminate motion-induced illusory displacement}, journal = {i-Perception}, year = {2011}, month = {7}, volume = {2}, number = {4}, pages = {209}, abstract = {When the sine-wave grating of a Gabor patch drifts to the left or right, the perceived position of the entire object is shifted in the direction of local motion. In the current work we explored whether active control of the physical position of the patch overcomes such motion induced illusory displacement. In Experiment 1 we created a simple computer game and asked participants to continuously guide a Gabor patch along a randomly curving path using a joystick. When the grating inside the Gabor patch was stationary, participants could perform this task without error. When the grating drifted to either left or right, we observed systematic errors consistent with previous reports of motion-induced illusory displacement. In Experiment 2 we created an iPad application where the built-in accelerometer tilt control was used to steer the patch through as series of “gates”. Again, we observed systematic guidance errors that depended on the direction and speed of local motion. 
In conclusion, we found no evidence that participants could adapt or compensate for illusory displacement given active control of the target.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/2011/APCV-2011-Thornton.pdf}, department = {Department B{\"u}lthoff}, talk_type = {Abstract Talk}, web_url = {http://i-perception.perceptionweb.com/journal/I/article/ic209}, institution = {Max Planck Institute for Biological Cybernetics}, event_place = {Hong Kong}, event_name = {7th Asia-Pacific Conference on Vision (APCV 2011)}, DOI = {10.1068/ic209}, author = {Thornton, IM and Caniard, F and Mamassian, P and B{\"u}lthoff, HH} } @Conference { 3901, title = {Visually induced linear vection is enhanced by small physical accelerations}, year = {2006}, month = {6}, volume = {7}, pages = {33}, abstract = {Wong \& Frost (1981) showed that the onset latency of visually induced self-rotation illusions (circular vection) can be reduced by concomitant small physical motions (jerks). Here, we tested whether (a) such facilitation also applies for translations, and (b) whether the strength of the jerk (degree of visuo-vestibular cue conflict) matters. 14 na{\"i}ve observers rated onset, intensity, and convincingness of forward linear vection induced by photorealistic visual stimuli of a street of houses presented on a projection screen (FOV: 75\(^{\circ}\)\(\times\)58\(^{\circ}\)). For 2/3 of the trials, brief physical forward accelerations (jerks applied using a Stewart motion platform) accompanied the visual motion onset. Adding jerks enhanced vection significantly; Onset latency was reduced by 50\%, convincingness and intensity ratings increased by more than 60\%. Effect size was independent of visual acceleration (1.2 and 12m/s\verb=^=2) and jerk size (about 0.8 and 1.6m/s\verb=^=2 at participants' head for 1 and 3cm displacement, respectively), and showed no interactions. 
Thus, quantitative matching between the visual and physical acceleration profiles might not be as critical as often believed as long as they match qualitatively and are temporally synchronized. These findings could be employed for improving the convincingness and effectiveness of low-cost simulators without the need for expensive, large motion platforms.}, url = {http://www.kyb.tuebingen.mpg.de/fileadmin/user_upload/files/publications/Riecke_poster4IMRF_2006_Visually\%20Induced\%20Linear\%20Vection\%20is\%20Enhanced\%20by\%20Small\%20Physical\%20Accelerations_4web_3901[0].pdf}, department = {Department B{\"u}lthoff}, talk_type = {Abstract Talk}, web_url = {http://imrf.mcmaster.ca/IMRF/2006/viewabstract.php?id=96}, institute = {Biologische Kybernetik}, organization = {Max-Planck-Gesellschaft}, event_place = {Dublin, Ireland}, event_name = {7th International Multisensory Research Forum (IMRF 2006)}, language = {en}, author = {Riecke, BE and Caniard, F and Schulte-Pelkum, J} } @Conference { ThorntonCFM2005, title = {Localisation errors during active control of a target object}, year = {2005}, month = {3}, day = {23}, pages = {3}, abstract = {When a drifting grating is viewed through a stationary aperture, the global position of the aperture is displaced in the direction of local motion (Ramachandran \& Anstis, 1990, Perception, 19, 611-616). The purpose of the current study was to assess whether such displacement continues to occur when observers actively control the global position of the aperture. We created a simple game in which observers were given a birds-eye-view of a curving pathway along which they had to guide a target object. The target object was a Gabor patch with a spatial frequency of 1 cycle per degree and an extent of approximately 2.5 degrees visual angle. The pathway was scrolled downwards to create the impression that the object was moving upwards along the path at a constant velocity. 
The vertical position of the target object was fixed at the centre of the screen, and a joystick was used to adjust the horizontal position so that the aperture was always centred on the pathway. In separate blocks we varied the speed of local motion in the aperture from 0 to 3 cycles per second, in steps of 0.5 cycles. When the grating was stationary, observers were able to guide the target object along the path with virtually no errors. As the speed of local motion increased errors also increased, reaching an asymptote of 27 min arc at 1.5 cycles per second. These results suggest that active control of an object cannot overcome the perceptual displacements induced by the drifting grating.}, department = {Department B{\"u}lthoff}, talk_type = {Abstract Talk}, web_url = {http://www.theava.net/abstracts/ava2005ab.doc}, event_place = {Bristol, UK}, event_name = {AVA 2005 Annual Meeting: Active Vision}, author = {Thornton, IM and Caniard, F and Friedrich, B and Mamassian, P} }