@ARTICLE{10.3389/fnhum.2013.00293,
  AUTHOR={Costanzo, Michelle and McArdle, Joseph and Swett, Bruce and Kemeny, Stefan and Xu, Jiang and Braun, Allen},
  TITLE={Spatial and temporal features of superordinate semantic processing studied with fMRI and EEG},
  JOURNAL={Frontiers in Human Neuroscience},
  VOLUME={7},
  YEAR={2013},
  URL={https://www.frontiersin.org/articles/10.3389/fnhum.2013.00293},
  DOI={10.3389/fnhum.2013.00293},
  ISSN={1662-5161},
  ABSTRACT={The relationships between the anatomical representation of semantic knowledge in the human brain and the timing of the neurophysiological mechanisms involved in manipulating such information remain unclear. This is the case for superordinate semantic categorization: the extraction of general features shared by broad classes of exemplars (e.g., living vs. non-living semantic categories). We proposed that, because of the abstract nature of this information, input from diverse modalities (visual or auditory, lexical or non-lexical) should converge and be processed in the same regions of the brain, at similar time scales, during superordinate categorization, specifically in a network of heteromodal regions and late in the course of the categorization process. To test this hypothesis, we combined electroencephalography and event-related potentials (EEG/ERP) with functional magnetic resonance imaging (fMRI) to characterize subjects' responses as they made superordinate categorical decisions (living vs. non-living) about objects presented as visual pictures or auditory words. Consistent with our hypothesis, our results reveal that, during the course of superordinate categorization, information provided by these diverse inputs appears to converge in both time and space: fMRI showed that heteromodal areas of the parietal and temporal cortices are active during categorization of both classes of stimuli, and the ERP results suggest that superordinate categorization is reflected in a late positive component (LPC) with a parietal distribution and long latencies for both stimulus types. Within the areas and times in which modality-independent responses were identified, some differences between living and non-living categories were observed, with a more widespread spatial extent and longer-latency responses for categorization of non-living items.}
}