@Article{ MachullaDE2016, title = {The consistency of crossmodal synchrony perception across the visual, auditory, and tactile senses}, journal = {Journal of Experimental Psychology: Human Perception and Performance}, year = {2016}, month = {7}, volume = {42}, number = {7}, pages = {1026-1038}, abstract = {Crossmodal judgments of relative timing commonly yield a nonzero point of subjective simultaneity (PSS). Here, we test whether subjective simultaneity is coherent across all pairwise combinations of the visual, auditory, and tactile modalities. To this end, we examine PSS estimates for transitivity: If Stimulus A has to be presented x ms before Stimulus B to result in subjective simultaneity, and B y ms before C, then A and C should appear simultaneous when A precedes C by z ms, where z = x + y. We obtained PSS estimates via 2 different timing judgment tasks—temporal order judgments (TOJs) and synchrony judgments (SJs)—thus allowing us to examine the relationship between TOJ and SJ. We find that (a) SJ estimates do not violate transitivity, and that (b) TOJ and SJ data are linearly related. Together, these findings suggest that both TOJ and SJ access the same perceptual representation of simultaneity and that this representation is globally coherent across the tested modalities. Furthermore, we find that (c) TOJ estimates are intransitive. This is consistent with the proposal that while the perceptual representation of simultaneity is coherent, relative timing judgments that access this representation can at times be incoherent with each other because of postperceptual response biases.}, web_url = {http://psycnet.apa.org/journals/xhp/42/7/1026.pdf}, state = {published}, DOI = {10.1037/xhp0000191}, author = {Machulla T-K{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Article{ MachullaDFE2011, title = {Multisensory simultaneity recalibration: storage of the aftereffect in the absence of counterevidence}, journal = {Experimental Brain Research}, year = {2012}, month = {3}, volume = {217}, number = {1}, pages = {89-97}, abstract = {Recent studies show that repeated exposure to an asynchrony between auditory and visual stimuli shifts the point of subjective simultaneity. Usually, the measurement stimuli used to assess this aftereffect are interleaved with short re-exposures to the asynchrony. In a first experiment, we show that the aftereffect declines during measurement in spite of the use of re-exposures. In a second experiment, we investigate whether the observed decline is due to a dissipation of the aftereffect with the passage of time, or is instead the result of using measurement stimuli with a distribution of asynchronies that differs from that of the exposure stimulus. To this end, we introduced a delay before measuring the aftereffect and compared its magnitude with and without the delay.
We find that the aftereffect does not dissipate during the delay but instead is stored until new sensory information in the form of measurement stimuli is presented as counterevidence (i.e., stimuli with an asynchrony that differs from the one used during exposure).}, web_url = {http://www.springerlink.com/content/u7337tv890047w92/fulltext.pdf}, state = {published}, DOI = {10.1007/s00221-011-2976-5}, author = {Machulla TK{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Fr\"ohlich E{efroehl}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Article{ 5774, title = {Within- and cross-modal distance information disambiguates visual size perception}, journal = {PLoS Computational Biology}, year = {2010}, month = {3}, volume = {6}, number = {3}, pages = {1-10}, abstract = {Perception is fundamentally underconstrained because different combinations of object properties can generate the same sensory information. To disambiguate sensory information into estimates of scene properties, our brains incorporate prior knowledge and additional “auxiliary” (i.e., not directly relevant to the desired scene property) sensory information to constrain perceptual interpretations. For example, knowing the distance to an object helps in perceiving its size. The literature contains few demonstrations of the use of prior knowledge and auxiliary information in combined visual and haptic disambiguation and almost no examination of haptic disambiguation of vision beyond “bistable” stimuli. Previous studies have reported that humans integrate multiple unambiguous sensations to perceive single, continuous object properties, like size or position. Here we test whether humans use visual and haptic information, individually and jointly, to disambiguate size from distance. We presented participants with a ball moving in depth with a changing diameter. Because no unambiguous distance information is available under monocular viewing, participants rely on prior assumptions about the ball's distance to disambiguate their size percept. Presenting auxiliary binocular and/or haptic distance information augments participants' prior distance assumptions and improves their size judgment accuracy—though binocular cues were trusted more than haptic cues.
Our results suggest that both visual and haptic distance information disambiguate size perception, and we interpret these results in the context of probabilistic perceptual reasoning.}, web_url = {http://www.ploscompbiol.org/article/info%3Adoi%2F10.1371%2Fjournal.pcbi.1000697}, state = {published}, DOI = {10.1371/journal.pcbi.1000697}, EPUB = {e1000697}, author = {Battaglia PW{batt0086}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Research Group Multisensory Perception and Action}; Schrater PR; Machulla T{tonja}{Research Group Multisensory Perception and Action}; Kersten D{kersten}} } @Article{ 5870, title = {Recalibration of multisensory simultaneity: Cross-modal transfer coincides with a change in perceptual latency}, journal = {Journal of Vision}, year = {2009}, month = {12}, volume = {9}, number = {12:7}, pages = {1-16}, abstract = {After exposure to asynchronous sound and light stimuli, perceived audio-visual synchrony changes to compensate for the asynchrony. Here we investigate to what extent this audio-visual recalibration effect transfers to visual-tactile and audio-tactile simultaneity perception in order to infer the mechanisms responsible for temporal recalibration. Results indicate that audio-visual recalibration of simultaneity can transfer to audio-tactile and visual-tactile stimuli depending on the way in which the multisensory stimuli are presented. With presentation of co-located multisensory stimuli, we found a change in the perceptual latency of the visual stimuli. Presenting auditory stimuli through headphones, on the other hand, induced a change in the perceptual latency of the auditory stimuli. We argue that the difference in transfer depends on the relative trust in the auditory and visual estimates. Interestingly, these findings were confirmed by showing that audio-visual recalibration influences simple reaction time to visual and auditory stimuli. Presenting co-located stimuli during asynchronous exposure induced a change in reaction time to visual stimuli, while with headphones the change in reaction time occurred for the auditory stimuli. These results indicate that the perceptual latency is altered with repeated exposure to asynchronous audio-visual stimuli in order to compensate (at least in part) for the presented asynchrony.}, web_url = {http://journalofvision.org/9/12/7/DiLuca-2009-jov-9-12-7.pdf}, state = {published}, DOI = {10.1167/9.12.7}, author = {Di Luca M{max}{Research Group Multisensory Perception and Action}; Machulla T-K{tonja}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Research Group Multisensory Perception and Action}} } @Inproceedings{ GreisESMS2016, title = {Decision-Making under Uncertainty: How the Amount of Presented Uncertainty Influences User Behavior}, year = {2016}, month = {10}, pages = {52}, abstract = {In everyday life, people regularly make decisions based on uncertain data, e.g., when using a navigation device or looking at the weather forecast. In our work, we compare four representations that communicate different amounts of uncertainty information to the user. We compared them in a study by publishing a web-based game on Facebook. In total, 44 users played 991 turns. We analyzed the turns by logging game metrics such as the gain per turn and included a survey element. The results show that an abundance of uncertainty information leads to taking unnecessary risks.
However, representations that aggregate detailed uncertainty information provide a good trade-off between being understandable to the players and encouraging medium risks with high gains. The absence of uncertainty information reduces risk taking and leads to more won turns, but with the lowest monetary gain.}, web_url = {http://dl.acm.org/citation.cfm?id=2971535}, editor = {Björk, S., E. Eriksson}, publisher = {ACM Press}, address = {New York, NY, USA}, event_name = {9th Nordic Conference on Human-Computer Interaction (NordiCHI '16)}, event_place = {Göteborg, Sweden}, state = {published}, ISBN = {978-1-4503-4763-1}, DOI = {10.1145/2971485.2971535}, author = {Greis M; El. Agroudy P; Schuff H; Machulla T{tonja}; Schmidt A} } @Inproceedings{ VoitMWSSH2016, title = {Exploring notifications in smart home environments}, year = {2016}, month = {9}, pages = {942-947}, abstract = {Notifications are a core mechanism of current smart devices. They inform about a variety of events including messages, social network comments, and application updates. While users appreciate the awareness that notifications provide, notifications cause distraction, higher cognitive load, and task interruptions. With the increasing importance of smart environments, the number of sensors that could trigger notifications will increase dramatically. A flower with a moisture sensor, for example, could create a notification whenever the flower needs water. We assume that current notification mechanisms will not scale with the increasing number of notifications. We therefore explore notification mechanisms for smart homes. Notifications are shown on smartphones, on displays in the environment, next to the sending objects, or on the user's body. In an online survey, we compare the four locations in four scenarios. While different aspects influence the perceived suitability of each notification location, the smartphone is generally rated best.}, web_url = {http://dl.acm.org/citation.cfm?id=2962661}, editor = {Paternò, V., K. Väänänen}, publisher = {ACM Press}, address = {New York, NY, USA}, event_name = {18th International Conference on Human-Computer Interaction with Mobile Devices and Services Adjunct (MobileHCI '16)}, event_place = {Firenze, Italy}, state = {published}, ISBN = {978-1-4503-4413-5}, DOI = {10.1145/2957265.2962661}, author = {Voit A; Machulla T{tonja}; Weber D; Schwind V; Schneegans S; Henze N} } @Inproceedings{ ElAgroudyMRDFSWC2016, title = {Impact of Reviewing Lifelogging Photos on Recalling Episodic Memories}, year = {2016}, month = {9}, pages = {1014-1019}, abstract = {Photos are a rich and popular form for preserving memories. Thus, they are widely used as cues to augment human memory. Near-continuous capture and sharing of photos have generated a need to summarize and review relevant photos to revive important events. However, there is limited work on exploring how regular reviewing of selected photos influences overall recall of past events. In this paper, we present an experiment to investigate the effect of regular reviewing of egocentric lifelogging photos on the formation and retrieval of autobiographic memories. Our approach protects the privacy of the participants and provides improved validation of their memory performance compared to existing approaches.
The results of our experiment are a step towards developing memory-shaping algorithms that accentuate or attenuate memories on demand.}, web_url = {http://recall-fet.eu/wahm16/}, editor = {Lukowicz, P., A. Krüger}, publisher = {ACM Press}, address = {New York, NY, USA}, event_name = {ACM International Joint Conference on Pervasive and Ubiquitous Computing (UbiComp '16)}, event_place = {Heidelberg, Germany}, state = {published}, ISBN = {978-1-4503-4462-3}, DOI = {10.1145/2968219.2968562}, author = {El. Agroudy P; Machulla T{tonja}; Rzayev R; Dingler T; Funk M; Schmidt A; Ward G; Clinch S} } @Poster{ diLucaMBE2011, title = {Recalibration of audiovisual simultaneity}, year = {2011}, month = {9}, web_url = {http://www.bccn-tuebingen.de/events/bernstein-symposium-series-2011/symposium-d.html}, event_name = {Bernstein Cluster D Symposium: Multisensory Perception and Action}, event_place = {Tübingen, Germany}, state = {published}, author = {Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Barnett-Cowan M{mbc}{Department Human Perception, Cognition and Action}; Ernst MO{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 6488, title = {Does audiovisual temporal recalibration store without stimulation?}, journal = {Journal of Vision}, year = {2010}, month = {5}, volume = {10}, number = {7}, pages = {1414}, abstract = {Recent studies have investigated adaptation to temporal discrepancies between different sensory modalities by first exposing participants to asynchronous multisensory signals, and subsequently assessing the magnitude of the adaptation effect (the size of the shift in subjective simultaneity). Although never reported, there is reason to assume that the strength of the adaptation effect declines during this measurement period. Usually, short re-exposures are interleaved with testing to prevent such a decline. In the present study, we show that a decrease in the strength of adaptation can still take place, even when a common re-exposure procedure is used. In a second experiment, we investigated whether the observed decline is due to (1) a dissipation of adaptation with the passage of time, or (2) a new adaptation induced by the test stimuli. We find that temporal adaptation does not dissipate with time but is stored until new sensory information, i.e., stimuli that differ from those used during the adaptation procedure, is presented. An alternative explanation, namely that adaptation decays over time but is re-established before the first test trial due to the experimental procedure we chose, is addressed in a control experiment. This finding is discussed in terms of Helson's adaptation level (AL) theory [1947, Adaptation-level as frame of reference for prediction of psychophysical data. The American Journal of Psychology, 60, 1–29], according to which the null point of any perceptual dimension, in our case the perception of simultaneity on the dimension of temporal order, is a summarizing statistic of all stimuli presented in the past.
Any single stimulus pulls the AL toward its own value, and any single stimulus is judged as though it were compared with the current AL.}, web_url = {http://www.journalofvision.org/content/10/7/1414}, event_name = {10th Annual Meeting of the Vision Sciences Society (VSS 2010)}, event_place = {Naples, FL, USA}, state = {published}, DOI = {10.1167/10.7.1414}, author = {Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 5172, title = {Humans use stereo and haptic distance cues to improve physical object size estimates}, journal = {Journal of Vision}, year = {2008}, month = {6}, volume = {8}, number = {6}, pages = {1090}, abstract = {An object's visual image size is an ambiguous cue to its physical size. But if the object's distance is known, the physical size may be disambiguated and more accurately estimated. We asked whether humans use distance cues to improve size judgments. We presented participants with a virtual ball that simultaneously changed in physical size (imagine a balloon inflating or deflating) and in distance, and asked them to discriminate whether the physical size increased or decreased. With only visual image size information, size-change discrimination was poor. When additional haptic and/or stereo distance-change cues were provided, size-change judgments improved significantly. We conclude that the brain exploits its knowledge of how image size, physical size, and distance are related to improve perceptual size judgments. To quantify human behavior, we compared participants' use of distance cues with the predictions of an ideal observer that incorporates distance cues in proportion to their reliability. We independently measured participants' stereo and haptic distance discrimination performance and applied these empirical reliability measurements in the ideal model. We found that participants use stereo information to a degree similar to the ideal observer, but use haptic information less than the ideal observer. This result was confirmed by an additional conflict condition in which haptic and stereo distance-change cues indicated different values, so that their relative use could be measured.
Lastly, we ran a condition in which participants gripped the object with two fingers, so that a direct size-change cue was available, and found that participants integrated direct and indirect size-change cues to improve performance.}, web_url = {http://www.journalofvision.org/8/6/1090/}, event_name = {8th Annual Meeting of the Vision Sciences Society (VSS 2008)}, event_place = {Naples, FL, USA}, state = {published}, digital = {1}, DOI = {10.1167/8.6.1090}, author = {Battaglia P{batt0086}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Schrater P; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Kersten D{kersten}{Department Human Perception, Cognition and Action}} } @Poster{ 4522, title = {Perception of crossmodal simultaneity is not transitive}, journal = {Perception}, year = {2007}, month = {8}, volume = {36}, number = {ECVP Abstract Supplement}, pages = {209}, abstract = {When signals in different modalities are physically asynchronous, observers may still perceive them as being simultaneous due to differential physical transmission and physiological conduction delays. If sensory signals in different modalities are processed independently of each other, as assumed by independent-channels models, then the relative timings that lead to perceived simultaneity should be transitive across several modality pairs. For instance, if modality A has to be presented 20 ms before modality B to seem simultaneous with it and modality B 10 ms before modality C, then A should be presented 30 ms before C to seem simultaneous with it. Using temporal order judgments, we measured the point of subjective simultaneity (PSS) in three different modality pairs (visual-auditory, tactile-auditory, visual-tactile). Our results indicate that PSS are not transitive. Thus, we infer that signals are not processed independently of each other. Perceived signal timing in one modality depends on which other modality it is paired with. Therefore, independent-channels models cannot account for the processes underlying decisions about simultaneity of signals in different modalities.}, web_url = {http://pec.sagepub.com/content/36/1_suppl.toc}, event_name = {30th European Conference on Visual Perception}, event_place = {Arezzo, Italy}, state = {published}, DOI = {10.1177/03010066070360S101}, author = {Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 4519, title = {Temporal adaptation influences non-adapted modality pairs}, journal = {Perception}, year = {2007}, month = {8}, volume = {36}, number = {ECVP Abstract Supplement}, pages = {207}, abstract = {Repeated presentations of asynchronous audiovisual signals recalibrate the point of subjective simultaneity. It is not clear whether this effect is the result of an adaptation mechanism specific to the audiovisual modality pair or whether it is due to a mechanism common to all modalities.
Only in the latter case would we expect repeated asynchronous audiovisual information to influence perceived simultaneity in other modality pairs (audiotactile or visuotactile). We presented a series of asynchronous audiovisual signals to the participants (SOAs: 200 ms and -200 ms) and then estimated the point of subjective simultaneity for three modality pairs (audiovisual, audiotactile, visuotactile). Consistent with previous research, perceived simultaneity in the audiovisual modality pair changed for the two SOAs. Subjective simultaneity also shifted in the audiotactile modality pair. Hence, we conclude that the three tested modalities share a common adaptation mechanism. Moreover, since the visuotactile modality pair was not significantly affected by the manipulation, audiovisual adaptation is likely the result of a phenomenal shift of the auditory events in time.}, web_url = {http://pec.sagepub.com/content/36/1_suppl.toc}, event_name = {30th European Conference on Visual Perception}, event_place = {Arezzo, Italy}, state = {published}, DOI = {10.1177/03010066070360S101}, author = {Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 4521, title = {Crossmodal simultaneity is not transitive}, year = {2007}, month = {7}, volume = {10}, pages = {85}, abstract = {Sensory processing times can greatly differ between the senses. Hence, signals from different modalities that are presented with a delay corresponding to the processing time difference between these modalities appear simultaneous to the observer. We hypothesized that if processing is independent for each modality and if there is a common mechanism for the perception of simultaneity across modalities, then subjective simultaneity should be transitive. For example, if modality A has to be presented 20 ms before B and modality B 10 ms before C to be perceived as synchronous, then modality A should be presented 30 ms before C to seem synchronous with it. Observers judged the temporal order of three different modality pairs (visual-tactile, tactile-auditory, and visual-auditory) for eleven stimulus onset asynchronies. Stimuli from the three conditions were not blocked but presented randomly to prevent attentional prior-entry effects that might lead to artifactual intransitivity. From the responses, we determined the presentation delay leading to subjective simultaneity. To appear synchronous, the visual signal has to be presented 34 ms before the tactile, the tactile 55 ms before the auditory, and the visual 28 ms before the auditory. These results deviate significantly from transitivity.
We conclude that either stimulus processing time in one modality depends on which other modality it is paired with, or the notion of a common mechanism for crossmodal simultaneity has to be rejected.}, web_url = {http://www.twk.tuebingen.mpg.de/twk07/abstract.php?_load_id=machulla01}, event_name = {10th Tübinger Wahrnehmungskonferenz (TWK 2007)}, event_place = {Tübingen, Germany}, state = {published}, digital = {1}, author = {Machulla TK{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst MO{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 4518, title = {Perceived timing across modalities}, year = {2007}, month = {7}, volume = {2007}, pages = {15}, abstract = {Crossmodal stimuli can be perceived as being simultaneous even if they are not physically synchronous. This phenomenon has been attributed to different conduction delays. In this work we tested whether time in different modalities is processed independently or whether crossmodal interactions influence the perception of synchrony. (1) If unimodal timing is processed independently, perceived simultaneity across modality pairs should be transitive. For example, if modality A has to be presented 20 ms before modality B to appear simultaneous and modality B 10 ms before modality C, then A should be presented 30 ms before C to appear simultaneous. Subjects made temporal order judgments (TOJs) of asynchronous signals in three modality pairs (audio-visual, audio-tactile, visual-tactile). The points of subjective simultaneity (PSS) calculated for each modality pair are not transitive, indicating that perceived time is not processed independently in each modality. (2) It has been shown that the PSS of audio-visual signals can be recalibrated by the repeated presentation of asynchronous stimuli. It is not clear whether this effect is the result of an adaptation mechanism specific to the audio-visual modality pair or whether it is due to a common crossmodal mechanism. Using the same type of measurements, we show that the PSS following presentation of an asynchronous audio-visual stimulus is not constant in the audio-tactile modality pair. Hence, crossmodal timing is also affected by a common adaptation mechanism. Since the PSS for visual-tactile stimuli was not affected, audio-visual adaptation effects are likely the result of a phenomenal shift of the auditory events in time. Our results indicate that perceived timing in one modality depends on which other modality it is paired with and that perceived simultaneity also changes for non-adapted modality pairs.
These results are not consistent with independent-channels models of crossmodal timing, but rather indicate that time perception is affected by crossmodal interactions.}, file_url = {fileadmin/user_upload/files/publications/Perception-Action-Symposium-2007-DiLuca.pdf}, web_url = {http://www.hcsnet.edu.au/hcsnetevents/2007/pasymposium/}, event_name = {International Intersensory Research Symposium 2007: Perception and Action}, event_place = {Sydney, Australia}, state = {published}, author = {Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Poster{ 4684, title = {Temporal calibration between the visual, auditory and tactile senses: A psychophysical approach}, year = {2007}, month = {7}, volume = {1}, pages = {36-37}, abstract = {Human observers acquire information about physical properties of the environment through different sensory modalities. For natural events, these sensory signals show a specific temporal, spatial and contextual configuration that aids the integration into a coherent multisensory percept. For multimodal virtual environments, however, signals have to be created and displayed separately for different modalities, which may result in a miscalibration of these signals. This, in turn, can greatly reduce the observer’s sense of immersion and presence. Using psychophysical methods, we investigate fundamental questions regarding how the temporal alignment of signals from the visual, auditory and tactile modalities is achieved. A first project examines the perception of subjective simultaneity of signals. Simultaneity detection poses a non-trivial matching problem to the human brain: physical and neural transmission times differ greatly between the senses. As there is only partial compensation for these differential delays, subjective simultaneity may result from presenting stimuli with a physical delay. Here, we are interested in whether this phenomenon reflects an amodal timing mechanism that works across all modalities in a uniform fashion. Further, we examine the sensitivity of asynchrony detection for different modality pairs, as well as interindividual differences. In a second project, we examine the ability of the human cognitive system to adapt to asynchronous information in different modalities. Adaptation may be used to reduce the disruptive effects of temporal miscalibration between signals in different modalities. We are interested in the strength of adaptation as well as the mechanism underlying this effect.
Future projects aim to investigate (a) the precise relationship between the perception of synchrony and multimodal integration, (b) the influence of prior knowledge about a common origin of signals on the perception of synchrony, (c) the influence of timing on the perception of cause and effect, and (d) the neural basis of the detection of synchrony. In conclusion, our research seeks to understand the mechanisms underlying temporal calibration between different sensory modalities, with the goal of identifying factors that foster multimodal integration and, in turn, the sense of presence.}, web_url = {http://peach.iti.gr/PeachSS1_final.pdf}, event_name = {1st Peach Summer School}, event_place = {Santorini, Greece}, state = {published}, author = {Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} } @Conference{ 5064, title = {Recalibration of Audiovisual Synchrony: What is changing?}, year = {2008}, month = {3}, day = {4}, volume = {50}, pages = {56}, abstract = {Both physical and physiological transmission times can differ between audition and vision. Under certain conditions, the brain reduces perceived asynchrony by adapting to this temporal discrepancy. In two experiments we investigated whether this recalibration is specific to auditory and visual stimuli, or whether other modality combinations (audiotactile, visuotactile) are affected as well. We presented asynchronous audiovisual signals, with either the auditory or the visual signal leading. Then, using temporal order judgments, we measured observers’ point of subjective simultaneity for three modality combinations. Results indicate an adjustment of perceived simultaneity for the audiovisual and the visuotactile modality pairs. We conclude that audiovisual adaptation is the result of a change in the processing latencies of visual events. In a second experiment, we corroborate this finding. We demonstrate that reaction times to visual signals, but not to tactile or auditory signals, change as a result of audiovisual recalibration.}, web_url = {https://www.teap.de/memory/Abstractband_50_2008_marburg.pdf}, event_name = {50. Tagung Experimentell Arbeitender Psychologen (TeaP 2008)}, event_place = {Marburg, Germany}, state = {published}, author = {Machulla T{tonja}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Di Luca M{max}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}; Ernst M{marc}{Department Human Perception, Cognition and Action}{Research Group Multisensory Perception and Action}} }