@article{Zimmer:2003:0736-2501:210,
  author           = {Zimmer, Karin and Ellermeier, Wolfgang},
  title            = {Deriving ratio-scale measures of sound quality from preference judgments},
  journal          = {Noise Control Engineering Journal},
  year             = {2003},
  month            = jul,
  volume           = {51},
  number           = {4},
  pages            = {210--215},
  issn             = {0736-2501},
  doi              = {10.3397/1.2839716},
  url              = {https://ince.publisher.ingentaconnect.com/content/ince/ncej/2003/00000051/00000004/art00003},
  keyword          = {79, 63.2},
  abstract         = {One of the major goals of sound-quality research has been to develop automated, objective metrics of perceptual attributes. These metrics must be validated against subjective measures of the attributes that they claim to capture. To that end, typically, verbal reports are collected on complex psychophysical attributes from observers directly. That procedure, however, involves the risk of accumulating data of unknown validity, dimensionality, and unit. Rather than getting at the dimensions of interest directly, this paper advocates asking all but very simple judgments of preference from observers. Such data are then used to model the listeners' decision strategies when comparing auditory events. Once a valid decision model has been established, psychophysical scale values can be derived. Two approaches, the Bradley-Terry-Luce model, and the representation of paired comparisons by 'preference trees' are elaborated, and illustrated with examples from sound-quality research. It is demonstrated that these 'indirect' approaches offer the advantage of (a) an explicitly stated theory about the observer's decision strategy, (b) built-in checks of the consistency of judgments, and (c) statistical tests to validate if the attempt at scale construction succeeded. In that way, these indirect methods (d) reveal the dimensional structure behind psychoacoustical judgments, and (e) provide the opportunity to discover 'new,' i.e. as yet undetected, auditory attributes.},
  parent_itemid    = {infobike://ince/ncej},
  publishercode    = {ince},
  publication-date = {2003-07-01T00:00:00},
  itemtype         = {ARTICLE},
}