@article{Samaha076687,
  author       = {Samaha, Jason and Boutonnet, Bastien and Lupyan, Gary},
  title        = {How Prior Knowledge Prepares Perception: Prestimulus Oscillations Carry Perceptual Expectations and Influence Early Visual Responses},
  journal      = {bioRxiv},
  elocation-id = {076687},
  year         = {2016},
  doi          = {10.1101/076687},
  publisher    = {Cold Spring Harbor Laboratory},
  url          = {https://www.biorxiv.org/content/early/2016/11/09/076687},
  eprint       = {https://www.biorxiv.org/content/early/2016/11/09/076687.full.pdf},
  abstract     = {Perceptual experience results from a complex interplay of bottom-up input and prior knowledge about the world, yet the extent to which knowledge affects perception, the neural mechanisms underlying these effects, and the stages of processing at which these two sources of information converge, are still unclear. In a series of experiments we show that verbal cues not only help recognition of ambiguous {\textquotedblleft}Mooney{\textquotedblright} images, but improve accuracy and RTs in a same/different discrimination task. We then used electroencephalography (EEG) to better understand the mechanisms of this effect. The improved discrimination of images previously made meaningful was accompanied by a larger occipital-parietal P1 evoked response to the meaningful versus meaningless target stimuli. Time-frequency analysis of the interval between the two stimuli (just prior to the target stimulus) revealed increases in the power of posterior alpha-band (8-14 Hz) oscillations when the meaning of the stimuli to be compared was trained. The magnitude of the prestimulus alpha difference and the P1 amplitude difference was positively correlated across individuals. These results suggest that prior knowledge prepares the brain for upcoming perception via the modulation of prestimulus alpha-band oscillations, and that this preparatory state influences early ({\textasciitilde}120 ms) stages of subsequent visual processing.Significance Statement What we see is affected by what we know, but what kind of knowledge affects our perception, and at what stages of perceptual processing do such effects occur? We show that verbal hints vastly improve people{\textquoteright}s ability to recognize ambiguous images and improve objective performance on a visual discrimination task. Using electrophysiology (EEG) we then show that knowing in advance the meaning of an ambiguous image increases alpha-band oscillations prior to image onset and visual-evoked potentials show rapid enhancement {\textasciitilde}120 ms following image onset. These results suggest that alpha is involved in bringing prior knowledge to bear on the interpretation of sensory stimuli, demonstrating that perception is constructed from both sensory input and prior knowledge about the word.},
}